# Created by Thomas Jones on 06/11/15 - thomas@tomtecsolutions.com
# branding.py, a plugin for minqlx to brand your server.
# This plugin is released to everyone, for any purpose. It comes with no warranty, no guarantee it works, it's released AS IS.
# You can modify everything, except for lines 1-4 and the !tomtec_versions code. They're there to indicate I whacked this together originally. Please make it better :D
"""
Branding.py is a minqlx plugin that permits you to personalise your server with your own information.
Simply put the plugin in the 'minqlx-plugins' folder, !load the plugin, and set these cvars:
qlx_serverBrandName - Where the map name usually appears, the text set in this cvar will appear instead.
qlx_serverBrandTopField - Where the map author credit (line 1) appears, the text set in this cvar will appear after the credit.
qlx_serverBrandBottomField - Where the map author credit (line 2) appears, the text set in this cvar will appear after the credit.
qlx_connectMessage - Shown to a player on the awaiting-challenge screen when they first connect to the server.
qlx_loadedMessage - Centre-printed to a player once they reach the menu after connecting and click Join or Spectate.
qlx_countdownMessage - When the countdown begins, this text will appear mid-screen. (like the qlx_loadedMessage does)
qlx_endOfGameMessage - When the game finishes, it'll put the text in this cvar in the text box on the left.
qlx_brandingPrependMapName - This cvar will put the map name before your qlx_serverBrandName. Default: 0
qlx_brandingAppendGameType - Will add the game type after your qlx_serverBrandName. Default: 0
qlx_rainbowBrandName - Make the entire map name (qlx_serverBrandName) appear in rainbow colouring. Default: 0
Once set, change maps, and you'll see the map loading screen is changed.
"""
import minqlx
class branding(minqlx.Plugin):
def __init__(self):
self.add_hook("new_game", self.brand_map)
self.add_hook("player_connect", self.player_connect)
self.add_hook("player_loaded", self.player_loaded)
self.add_hook("game_countdown", self.game_countdown)
self.add_hook("game_end", self.game_end)
self.add_command("tomtec_versions", self.cmd_showversion)
self.set_cvar_once("qlx_brandingPrependMapName", "0")
self.set_cvar_once("qlx_brandingAppendGameType", "0")
self.set_cvar_once("qlx_rainbowBrandName", "0")
self.plugin_version = "2.1"
self.playerConnectedYetList = []
def brand_map(self):
if self.get_cvar("qlx_serverBrandName") == None:
self.set_cvar("qlx_serverBrandName", self.game.map_title)
if self.get_cvar("qlx_brandingPrependMapName", bool):
topBranding = self.game.map_title + " " + self.get_cvar("qlx_serverBrandName")
else:
topBranding = self.get_cvar("qlx_serverBrandName")
if self.get_cvar("qlx_brandingAppendGameType", bool):
minqlx.set_configstring(3, topBranding + " " + self.game.type)
else:
minqlx.set_configstring(3, topBranding)
if self.get_cvar("qlx_serverBrandTopField") != None:
cs = self.game.map_subtitle1
if cs:
cs += " - "
minqlx.set_configstring(678, cs + (self.get_cvar("qlx_serverBrandTopField")))
if self.get_cvar("qlx_serverBrandBottomField") != None:
cs = self.game.map_subtitle2
if cs:
cs += " - "
minqlx.set_configstring(679, cs + (self.get_cvar("qlx_serverBrandBottomField")))
if self.get_cvar("qlx_rainbowBrandName", bool):
# Thanks Mino for this bit!
def rotating_colors():
i = 0
while True:
res = (i % 7) + 1
i += 1
yield res
            map_name = self.clean_text(minqlx.get_configstring(3))
            r = rotating_colors()
            res = ""
            for ch in map_name:
                res += "^{}{}".format(next(r), ch)
minqlx.set_configstring(3, res)
def player_connect(self, player):
if self.get_cvar("qlx_connectMessage") != None:
if player not in self.playerConnectedYetList:
self.playerConnectedYetList.append(player)
return "{}\n^7This server is running ^4branding.py^7. ^2http://github.com/tjone270/Quake-Live^7.\n".format(self.get_cvar("qlx_connectMessage"))
def player_loaded(self, player):
if self.get_cvar("qlx_loadedMessage") != None:
self.center_print(self.get_cvar("qlx_loadedMessage"), player.id)
try:
self.playerConnectedYetList.remove(player)
except:
return
def game_countdown(self):
if self.get_cvar("qlx_countdownMessage") != None:
self.center_print(self.get_cvar("qlx_countdownMessage"))
def game_end(self, data):
if self.get_cvar("qlx_endOfGameMessage") != None:
self.msg(self.get_cvar("qlx_endOfGameMessage"))
def cmd_showversion(self, player, msg, channel):
channel.reply("^4branding.py^7 - version {}, created by Thomas Jones on 06/11/2015.".format(self.plugin_version))
import os
from django.utils.translation import gettext_lazy as _
######################
# CARTRIDGE SETTINGS #
######################
# The following settings are already defined in cartridge.shop.defaults
# with default values, but are common enough to be put here, commented
# out, for conveniently overriding. Please consult the settings
# documentation for a full list of settings Cartridge implements:
# http://cartridge.jupo.org/configuration.html#default-settings
# Sequence of available credit card types for payment.
# SHOP_CARD_TYPES = ("Mastercard", "Visa", "Diners", "Amex")
# Setting to turn on featured images for shop categories. Defaults to False.
# SHOP_CATEGORY_USE_FEATURED_IMAGE = True
# If True, the checkout process is split into separate
# billing/shipping and payment steps.
# SHOP_CHECKOUT_STEPS_SPLIT = True
# If True, the checkout process has a final confirmation step before
# completion.
# SHOP_CHECKOUT_STEPS_CONFIRMATION = True
# Controls the formatting of monetary values according to the locale
# module in the Python standard library. If an empty string is
# used, will fall back to the system's locale.
# SHOP_CURRENCY_LOCALE = ""
# Dotted package path and name of the function that
# is called on submit of the billing/shipping checkout step. This
# is where shipping calculation can be performed and set using the
# function ``cartridge.shop.utils.set_shipping``.
# SHOP_HANDLER_BILLING_SHIPPING = \
# "cartridge.shop.checkout.default_billship_handler"
# Dotted package path and name of the function that
# is called once an order is successful and all of the order
# object's data has been created. This is where any custom order
# processing should be implemented.
# SHOP_HANDLER_ORDER = "cartridge.shop.checkout.default_order_handler"
# Dotted package path and name of the function that
# is called on submit of the payment checkout step. This is where
# integration with a payment gateway should be implemented.
# SHOP_HANDLER_PAYMENT = "cartridge.shop.checkout.default_payment_handler"
# Sequence of value/name pairs for order statuses.
# SHOP_ORDER_STATUS_CHOICES = (
# (1, "Unprocessed"),
# (2, "Processed"),
# )
# Sequence of value/name pairs for types of product options,
# eg Size, Colour. NOTE: Increasing the number of these will
# require database migrations!
# SHOP_OPTION_TYPE_CHOICES = (
# (1, "Size"),
# (2, "Colour"),
# )
# Sequence of indexes from the SHOP_OPTION_TYPE_CHOICES setting that
# control how the options should be ordered in the admin,
# eg for "Colour" then "Size" given the above:
# SHOP_OPTION_ADMIN_ORDER = (2, 1)
######################
# MEZZANINE SETTINGS #
######################
# The following settings are already defined with default values in
# the ``defaults.py`` module within each of Mezzanine's apps, but are
# common enough to be put here, commented out, for conveniently
# overriding. Please consult the settings documentation for a full list
# of settings Mezzanine implements:
# http://mezzanine.jupo.org/docs/configuration.html#default-settings
# Controls the ordering and grouping of the admin menu.
#
# ADMIN_MENU_ORDER = (
# ("Content", ("pages.Page", "blog.BlogPost",
# "generic.ThreadedComment", (_("Media Library"), "media-library"),)),
# (_("Shop"), ("shop.Product", "shop.ProductOption", "shop.DiscountCode",
# "shop.Sale", "shop.Order")),
# ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")),
# ("Users", ("auth.User", "auth.Group",)),
# )
# A three item sequence, each containing a sequence of template tags
# used to render the admin dashboard.
#
# DASHBOARD_TAGS = (
# ("blog_tags.quick_blog", "mezzanine_tags.app_list"),
# ("comment_tags.recent_comments",),
# ("mezzanine_tags.recent_actions",),
# )
# A sequence of templates used by the ``page_menu`` template tag. Each
# item in the sequence is a three item sequence, containing a unique ID
# for the template, a label for the template, and the template path.
# These templates are then available for selection when editing which
# menus a page should appear in. Note that if a menu template is used
# that doesn't appear in this setting, all pages will appear in it.
# PAGE_MENU_TEMPLATES = (
# (1, _("Top navigation bar"), "pages/menus/dropdown.html"),
# (2, _("Left-hand tree"), "pages/menus/tree.html"),
# (3, _("Footer"), "pages/menus/footer.html"),
# )
# A sequence of fields that will be injected into Mezzanine's (or any
# library's) models. Each item in the sequence is a four item sequence.
# The first two items are the dotted path to the model and its field
# name to be added, and the dotted path to the field class to use for
# the field. The third and fourth items are a sequence of positional
# args and a dictionary of keyword args, to use when creating the
# field instance. When specifying the field class, the path
# ``django.db.models.`` can be omitted for regular Django model fields.
#
# EXTRA_MODEL_FIELDS = (
# (
# # Dotted path to field.
# "mezzanine.blog.models.BlogPost.image",
# # Dotted path to field class.
# "somelib.fields.ImageField",
# # Positional args for field class.
# (_("Image"),),
# # Keyword args for field class.
# {"blank": True, "upload_to": "blog"},
# ),
# # Example of adding a field to *all* of Mezzanine's content types:
# (
# "mezzanine.pages.models.Page.another_field",
# "IntegerField", # 'django.db.models.' is implied if path is omitted.
# (_("Another name"),),
# {"blank": True, "default": 1},
# ),
# )
# Setting to turn on featured images for blog posts. Defaults to False.
#
# BLOG_USE_FEATURED_IMAGE = True
# If True, django-modeltranslation will be added to the
# INSTALLED_APPS setting.
USE_MODELTRANSLATION = False
########################
# MAIN DJANGO SETTINGS #
########################
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = "UTC"
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en"
# Supported languages
LANGUAGES = (("en", _("English")),)
# A boolean that turns on/off debug mode. When set to ``True``, stack traces
# are displayed for error pages. Should always be set to ``False`` in
# production. Best set to ``True`` in local_settings.py
DEBUG = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",)
# The numeric mode to set newly-uploaded files to. The value should be
# a mode you'd pass directly to os.chmod.
FILE_UPLOAD_PERMISSIONS = 0o644
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
#############
# DATABASES #
#############
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.",
# DB name or path to database file if using sqlite3.
"NAME": "",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
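# For example, a minimal sqlite3 configuration might look like this
# (illustrative values only):
#
# DATABASES = {
#     "default": {
#         "ENGINE": "django.db.backends.sqlite3",
#         "NAME": os.path.join(os.path.dirname(os.path.abspath(__file__)), "dev.db"),
#     }
# }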
#########
# PATHS #
#########
# Full filesystem path to the project.
PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_APP = os.path.basename(PROJECT_APP_PATH)
PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH)
# Every cache key will get prefixed with this value - here we set it to
# the name of the directory the project is in to try and use something
# project specific.
CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = "/static/"
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/"))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = "/media/"
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, MEDIA_URL.strip("/"))
# Package/module name to import the root urlpatterns from for the project.
ROOT_URLCONF = "%s.urls" % PROJECT_APP
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(PROJECT_ROOT, "templates")],
"OPTIONS": {
"context_processors": [
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.static",
"django.template.context_processors.media",
"django.template.context_processors.request",
"django.template.context_processors.tz",
"mezzanine.conf.context_processors.settings",
"mezzanine.pages.context_processors.page",
],
"loaders": [
"mezzanine.template.loaders.host_themes.Loader",
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
},
},
]
################
# APPLICATIONS #
################
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.redirects",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.sitemaps",
"django.contrib.messages",
"django.contrib.staticfiles",
"mezzanine.boot",
"mezzanine.conf",
"mezzanine.core",
"mezzanine.generic",
"mezzanine.pages",
"cartridge.shop",
"mezzanine.blog",
"mezzanine.forms",
"mezzanine.galleries",
# "mezzanine.twitter",
# "mezzanine.accounts",
)
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE = (
"mezzanine.core.middleware.UpdateCacheMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
# Uncomment if using internationalisation or localisation
# 'django.middleware.locale.LocaleMiddleware',
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"cartridge.shop.middleware.ShopMiddleware",
"mezzanine.core.request.CurrentRequestMiddleware",
"mezzanine.core.middleware.RedirectFallbackMiddleware",
"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware",
"mezzanine.core.middleware.SitePermissionMiddleware",
"mezzanine.pages.middleware.PageMiddleware",
"mezzanine.core.middleware.FetchFromCacheMiddleware",
)
# Store these package names here as they may change in the future since
# at the moment we are using custom forks of them.
PACKAGE_NAME_FILEBROWSER = "filebrowser_safe"
PACKAGE_NAME_GRAPPELLI = "grappelli_safe"
#########################
# OPTIONAL APPLICATIONS #
#########################
# These will be added to ``INSTALLED_APPS``, only if available.
OPTIONAL_APPS = (
"debug_toolbar",
"django_extensions",
"compressor",
PACKAGE_NAME_FILEBROWSER,
PACKAGE_NAME_GRAPPELLI,
)
##################
# LOCAL SETTINGS #
##################
# Allow any settings to be defined in local_settings.py which should be
# ignored in your version control system allowing for settings to be
# defined per machine.
# Instead of doing "from .local_settings import *", we use exec so that
# local_settings has full access to everything defined in this module.
# Also force into sys.modules so it's visible to Django's autoreload.
f = os.path.join(PROJECT_APP_PATH, "local_settings.py")
if os.path.exists(f):
    import sys
    import types
    module_name = "%s.local_settings" % PROJECT_APP
    module = types.ModuleType(module_name)
module.__file__ = f
sys.modules[module_name] = module
exec(open(f, "rb").read())
####################
# DYNAMIC SETTINGS #
####################
# set_dynamic_settings() will rewrite globals based on what has been
# defined so far, in order to provide some better defaults where applicable.
try:
from mezzanine.utils.conf import set_dynamic_settings
except ImportError:
pass
else:
set_dynamic_settings(globals())
# HitObject class
class HitObject:
def __init__(self, start_x, start_y, end_x, end_y, time, object_type):
self.start_x = start_x
self.start_y = start_y
self.end_x = end_x
self.end_y = end_y
self.time = time
self.object_type = object_type # hit_circle, even_repeat_slider, odd_repeat_slider, spinner
# Finds the line number in which the hit objects start.
def find_start(lines):
line_number = 0
for x in lines:
if x == "[HitObjects]":
return line_number + 1
line_number += 1
# Converts a line from .osu file into HitObject.
def convert_hit_object(line):
split_line = line.split(",")
start_x = int(split_line[0])
start_y = int(split_line[1])
end_x = int(split_line[0])
end_y = int(split_line[1])
time = int(split_line[2])
if int(split_line[3]) & 0b1:
object_type = "hit_circle"
elif int(split_line[3]) & 0b1000:
object_type = "spinner"
elif int(split_line[6]) % 2 == 0:
object_type = "even_repeat_slider"
else:
object_type = "odd_repeat_slider"
slider_point_list = split_line[5].split("|")
end_point = slider_point_list[-1].split(":")
end_x = int(end_point[0])
end_y = int(end_point[1])
return HitObject(start_x, start_y, end_x, end_y, time, object_type)
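# For example, a hit-circle line "256,192,1000,1,0" parses (under the format
# assumed above) to a HitObject with start_x=end_x=256, start_y=end_y=192,
# time=1000 and object_type="hit_circle".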
# Calculates the distance snap from the distance and time difference of two objects.
def calculate_distance_snap(first_object, second_object):
first_x = first_object.end_x
first_y = first_object.end_y
first_time = first_object.time
second_x = second_object.start_x
second_y = second_object.start_y
second_time = second_object.time
difference_x = abs(first_x - second_x)
difference_y = abs(first_y - second_y)
difference_time = second_time - first_time
calculation_time = difference_time
    if difference_time < 100:  # 2x bonus for objects too fast to singletap (detected as streams)
calculation_time = difference_time / 2.0
elif difference_time < 120: # For the grey spot around 300bpm which can be either jumps or streams.
calculation_time = difference_time / (((120 - difference_time) ** 2) / 400.0 + 1)
calculation_time = 1.0 / calculation_time
# 1/time has to be used for calculation as smaller time difference means bigger distance snap.
distance = (difference_x ** 2 + difference_y ** 2) ** 0.5
return distance * calculation_time
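# Worked example: with difference_time = 80 ms the stream branch applies, so
# the effective time is 80 / 2.0 = 40 ms and a 120-pixel gap scores
# 120 * (1 / 40) = 3.0.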
# Calculates weighting of objects.
def calculate_weighting(average_distance, max_distance, distance_snap):
second_half = max_distance - average_distance # used to calculate distance snap above the average
if distance_snap < average_distance:
raw_weight = (distance_snap / average_distance) / 2.0 # this is the raw weighting, range from 0 to 1
# if distance snap is under the average, put it somewhere between 0 and 0.5
else:
raw_weight = ((distance_snap - average_distance) / second_half) / 2.0 + 0.5
# if distance snap is above average, put it somewhere between 0.5 and 1
# spacing below ~0.67 is weighted just as much as spacing above it, so only relatively
# BIG jumps will make much of a difference
    weight = (raw_weight * 1.5) ** 1.7
    print(weight)
    return weight
# Calculates nerf/buff based on percentage change from old objects.
def calculate_percentage_change(old_percentage):
if old_percentage < 0.65:
# Nerf all maps which reach under 65%.
# 55% would get around 5% nerf, while 50% would get around 10% nerf.
return 1 - (((0.65 - old_percentage) ** 1.5) / 0.524)
else:
return 1
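# A minimal driver sketch tying the helpers together. The file name and the
# aggregation at the end are illustrative assumptions, not part of the
# original module:
if __name__ == "__main__":
    with open("map.osu", encoding="utf-8") as f:  # hypothetical input file
        lines = f.read().splitlines()
    start = find_start(lines)
    hit_objects = [convert_hit_object(line) for line in lines[start:] if line.strip()]
    snaps = [calculate_distance_snap(a, b)
             for a, b in zip(hit_objects, hit_objects[1:])]
    average = sum(snaps) / len(snaps)
    weights = [calculate_weighting(average, max(snaps), s) for s in snaps]
    print(sum(weights) / len(weights))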
r"""
Isomorphisms between Weierstrass models of elliptic curves
AUTHORS:
- Robert Bradshaw (2007): initial version
- John Cremona (Jan 2008): isomorphisms, automorphisms and twists
in all characteristics
"""
#*****************************************************************************
# Copyright (C) 2007 Robert Bradshaw <robertwb@math.washington.edu>
#
# Distributed under the terms of the GNU General Public License (GPL)
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# The full text of the GPL is available at:
#
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.categories.morphism import Morphism
from constructor import EllipticCurve
from sage.categories.homset import Hom
class baseWI:
r"""
This class implements the basic arithmetic of isomorphisms between
Weierstrass models of elliptic curves. These are specified by
lists of the form `[u,r,s,t]` (with `u\not=0`) which specifies a
transformation `(x,y) \mapsto (x',y')` where
`(x,y) = (u^2x'+r , u^3y' + su^2x' + t).`
INPUT:
- ``u,r,s,t`` (default (1,0,0,0)) -- standard parameters of an isomorphism between Weierstrass models.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: baseWI()
(1, 0, 0, 0)
sage: baseWI(2,3,4,5)
(2, 3, 4, 5)
sage: R.<u,r,s,t>=QQ[]; baseWI(u,r,s,t)
(u, r, s, t)
"""
def __init__(self, u=1, r=0, s=0, t=0):
r"""
Constructor: check for valid parameters (defaults to identity)
INPUT:
- ``u,r,s,t`` (default (1,0,0,0)) -- standard parameters of an isomorphism between Weierstrass models.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: baseWI()
(1, 0, 0, 0)
sage: baseWI(2,3,4,5)
(2, 3, 4, 5)
sage: R.<u,r,s,t>=QQ[]; baseWI(u,r,s,t)
(u, r, s, t)
"""
        if u == 0:
            raise ValueError("u!=0 required for baseWI")
        self.u = u
        self.r = r
        self.s = s
        self.t = t
def __cmp__(self, other):
"""
Standard comparison function.
The ordering is just lexicographic on the tuple `(u,r,s,t)`.
.. note::
In a list of automorphisms, there is no guarantee that the
identity will be first!
EXAMPLE::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: baseWI(1,2,3,4)==baseWI(1,2,3,4)
True
sage: baseWI(1,2,3,4)<baseWI(1,2,3,5)
True
sage: baseWI(1,2,3,4)>baseWI(1,2,3,4)
False
::
It will never return equality if other is of another type:
sage: baseWI() == 1
False
"""
if not isinstance(other, baseWI):
return cmp(type(self), type(other))
return cmp(self.tuple(), other.tuple())
def tuple(self):
r"""
Returns the parameters `u,r,s,t` as a tuple.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: u,r,s,t=baseWI(2,3,4,5).tuple()
sage: w=baseWI(2,3,4,5)
sage: u,r,s,t=w.tuple()
sage: u
2
"""
return (self.u,self.r,self.s,self.t)
def __mul__(self, other):
r"""
        Returns the composition of this isomorphism and another.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: baseWI(1,2,3,4)*baseWI(5,6,7,8)
(5, 56, 22, 858)
sage: baseWI()*baseWI(1,2,3,4)*baseWI()
(1, 2, 3, 4)
"""
u1,r1,s1,t1=other.tuple()
u2,r2,s2,t2=self.tuple()
return baseWI(u1*u2,(u1**2)*r2+r1,u1*s2+s1,(u1**3)*t2+s1*(u1**2)*r2+t1)
def __invert__(self):
r"""
Returns the inverse of this isomorphism.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: w=baseWI(2,3,4,5)
sage: ~w
(1/2, -3/4, -2, 7/8)
sage: w*~w
(1, 0, 0, 0)
sage: ~w*w
(1, 0, 0, 0)
sage: R.<u,r,s,t>=QQ[]; w=baseWI(u,r,s,t)
sage: ~w
(1/u, (-r)/u^2, (-s)/u, (r*s - t)/u^3)
sage: ~w*w
(1, 0, 0, 0)
"""
u,r,s,t=self.tuple()
return baseWI(1/u,-r/(u**2),-s/u,(r*s-t)/(u**3))
def __repr__(self):
r"""
Returns the string representation of this isomorphism.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: baseWI(2,3,4,5)
(2, 3, 4, 5)
"""
return self.tuple().__repr__()
def is_identity(self):
r"""
Returns True if this is the identity isomorphism.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: w=baseWI(); w.is_identity()
True
sage: w=baseWI(2,3,4,5); w.is_identity()
False
"""
return self.tuple()==(1,0,0,0)
def __call__(self, EorP):
r"""
Base application of isomorphisms to curves and points: a
baseWI `w` may be applied to a list `[a1,a2,a3,a4,a6]`
representing the `a`-invariants of an elliptic curve `E`,
returning the `a`-invariants of `w(E)`; or to `P=[x,y]` or
`P=[x,y,z]` representing a point in `\mathbb{A}^2` or
`\mathbb{P}^2`, returning the transformed point.
INPUT:
- ``EorP`` -- either an elliptic curve, or a point on an elliptic curve.
OUTPUT:
The transformed curve or point.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: E=EllipticCurve([0,0,1,-7,6])
sage: w=baseWI(2,3,4,5);
sage: w(E.ainvs())
[4, -7/4, 11/8, -3/2, -9/32]
sage: P=E(-2,3)
sage: w(P.xy())
[-5/4, 9/4]
sage: EllipticCurve(w(E.ainvs()))(w(P.xy()))
(-5/4 : 9/4 : 1)
"""
u,r,s,t=self.tuple()
if len(EorP)==5:
a1,a2,a3,a4,a6=EorP
            a6 += r*(a4 + r*(a2 + r)) - t*(a3 + r*a1 + t)
            a4 += -s*a3 + 2*r*a2 - (t + r*s)*a1 + 3*r*r - 2*s*t
            a3 += r*a1 + 2*t
a2 += -s*a1 + 3*r - s*s;
a1 += 2*s;
return [a1/u,a2/u**2,a3/u**3,a4/u**4,a6/u**6]
if len(EorP)==2:
x,y=EorP
x-=r
y-=(s*x+t)
return [x/u**2,y/u**3]
if len(EorP)==3:
x,y,z=EorP
x-=r*z
y-=(s*x+t*z)
return [x/u**2,y/u**3,z]
raise ValueError("baseWI(a) only for a=(x,y), (x:y:z) or (a1,a2,a3,a4,a6)")
def isomorphisms(E,F,JustOne=False):
r"""
Returns one or all isomorphisms between two elliptic curves.
INPUT:
- ``E``, ``F`` (EllipticCurve) -- Two elliptic curves.
- ``JustOne`` (bool) If True, returns one isomorphism, or None if
the curves are not isomorphic. If False, returns a (possibly
empty) list of isomorphisms.
OUTPUT:
Either None, or a 4-tuple `(u,r,s,t)` representing an isomorphism,
or a list of these.
.. note::
This function is not intended for users, who should use the
interface provided by ``ell_generic``.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: isomorphisms(EllipticCurve_from_j(0),EllipticCurve('27a3'))
[(-1, 0, 0, -1), (1, 0, 0, 0)]
sage: isomorphisms(EllipticCurve_from_j(0),EllipticCurve('27a3'),JustOne=True)
(1, 0, 0, 0)
sage: isomorphisms(EllipticCurve_from_j(0),EllipticCurve('27a1'))
[]
sage: isomorphisms(EllipticCurve_from_j(0),EllipticCurve('27a1'),JustOne=True)
"""
from ell_generic import is_EllipticCurve
if not is_EllipticCurve(E) or not is_EllipticCurve(F):
raise ValueError("arguments are not elliptic curves")
K = E.base_ring()
# if not K == F.base_ring(): return []
j=E.j_invariant()
if j != F.j_invariant():
if JustOne: return None
return []
from sage.rings.all import PolynomialRing
x=PolynomialRing(K,'x').gen()
a1E, a2E, a3E, a4E, a6E = E.ainvs()
a1F, a2F, a3F, a4F, a6F = F.ainvs()
char=K.characteristic()
if char==2:
if j==0:
ulist=(x**3-(a3E/a3F)).roots(multiplicities=False)
ans=[]
for u in ulist:
slist=(x**4+a3E*x+(a2F**2+a4F)*u**4+a2E**2+a4E).roots(multiplicities=False)
for s in slist:
r=s**2+a2E+a2F*u**2
tlist= (x**2 + a3E*x + r**3 + a2E*r**2 + a4E*r + a6E + a6F*u**6).roots(multiplicities=False)
for t in tlist:
if JustOne: return (u,r,s,t)
ans.append((u,r,s,t))
if JustOne: return None
ans.sort()
return ans
else:
ans=[]
u=a1E/a1F
r=(a3E+a3F*u**3)/a1E
slist=[s[0] for s in (x**2+a1E*x+(r+a2E+a2F*u**2)).roots()]
for s in slist:
t = (a4E+a4F*u**4 + s*a3E + r*s*a1E + r**2)
if JustOne: return (u,r,s,t)
ans.append((u,r,s,t))
if JustOne: return None
ans.sort()
return ans
b2E, b4E, b6E, b8E = E.b_invariants()
b2F, b4F, b6F, b8F = F.b_invariants()
if char==3:
if j==0:
ulist=(x**4-(b4E/b4F)).roots(multiplicities=False)
ans=[]
for u in ulist:
s=a1E-a1F*u
t=a3E-a3F*u**3
rlist=(x**3-b4E*x+(b6E-b6F*u**6)).roots(multiplicities=False)
for r in rlist:
if JustOne: return (u,r,s,t+r*a1E)
ans.append((u,r,s,t+r*a1E))
if JustOne: return None
ans.sort()
return ans
else:
ulist=(x**2-(b2E/b2F)).roots(multiplicities=False)
ans=[]
for u in ulist:
r = (b4F*u**4 -b4E)/b2E
s = (a1E-a1F*u)
t = (a3E-a3F*u**3 + a1E*r)
if JustOne: return (u,r,s,t)
ans.append((u,r,s,t))
if JustOne: return None
ans.sort()
return ans
# now char!=2,3:
c4E,c6E = E.c_invariants()
c4F,c6F = F.c_invariants()
if j==0:
m,um = 6,c6E/c6F
elif j==1728:
m,um=4,c4E/c4F
else:
m,um=2,(c6E*c4F)/(c6F*c4E)
ulist=(x**m-um).roots(multiplicities=False)
ans=[]
for u in ulist:
s = (a1F*u - a1E)/2
r = (a2F*u**2 + a1E*s + s**2 - a2E)/3
t = (a3F*u**3 - a1E*r - a3E)/2
if JustOne: return (u,r,s,t)
ans.append((u,r,s,t))
if JustOne: return None
ans.sort()
return ans
class WeierstrassIsomorphism(baseWI,Morphism):
r"""
Class representing a Weierstrass isomorphism between two elliptic curves.
"""
def __init__(self, E=None, urst=None, F=None):
r"""
        Constructor for the WeierstrassIsomorphism class.
INPUT:
- ``E`` -- an EllipticCurve, or None (see below).
- ``urst`` -- a 4-tuple `(u,r,s,t)`, or None (see below).
- ``F`` -- an EllipticCurve, or None (see below).
Given two Elliptic Curves ``E`` and ``F`` (represented by
Weierstrass models as usual), and a transformation ``urst``
from ``E`` to ``F``, construct an isomorphism from ``E`` to
``F``. An exception is raised if ``urst(E)!=F``. At most one
of ``E``, ``F``, ``urst`` can be None. If ``F==None`` then
``F`` is constructed as ``urst(E)``. If ``E==None`` then
``E`` is constructed as ``urst^-1(F)``. If ``urst==None``
then an isomorphism from ``E`` to ``F`` is constructed if
possible, and an exception is raised if they are not
        isomorphic. Otherwise ``urst`` can be a tuple of length 4 or
        an object of type ``baseWI``.
Users will not usually need to use this class directly, but instead use
methods such as ``isomorphism`` of elliptic curves.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: WeierstrassIsomorphism(EllipticCurve([0,1,2,3,4]),(-1,2,3,4))
Generic morphism:
From: Abelian group of points on Elliptic Curve defined by y^2 + 2*y = x^3 + x^2 + 3*x + 4 over Rational Field
To: Abelian group of points on Elliptic Curve defined by y^2 - 6*x*y - 10*y = x^3 - 2*x^2 - 11*x - 2 over Rational Field
Via: (u,r,s,t) = (-1, 2, 3, 4)
sage: E=EllipticCurve([0,1,2,3,4])
sage: F=EllipticCurve(E.cremona_label())
sage: WeierstrassIsomorphism(E,None,F)
Generic morphism:
From: Abelian group of points on Elliptic Curve defined by y^2 + 2*y = x^3 + x^2 + 3*x + 4 over Rational Field
To: Abelian group of points on Elliptic Curve defined by y^2 = x^3 + x^2 + 3*x + 5 over Rational Field
Via: (u,r,s,t) = (1, 0, 0, -1)
sage: w=WeierstrassIsomorphism(None,(1,0,0,-1),F)
sage: w._domain_curve==E
True
"""
from ell_generic import is_EllipticCurve
        if E is not None:
            if not is_EllipticCurve(E):
                raise ValueError("First argument must be an elliptic curve or None")
        if F is not None:
            if not is_EllipticCurve(F):
                raise ValueError("Third argument must be an elliptic curve or None")
        if urst is not None:
            if len(urst) != 4:
                raise ValueError("Second argument must be [u,r,s,t] or None")
        if len([par for par in [E, urst, F] if par is not None]) < 2:
            raise ValueError("At most 1 argument can be None")
        if F is None:  # easy case
baseWI.__init__(self,*urst)
F=EllipticCurve(baseWI.__call__(self,list(E.a_invariants())))
Morphism.__init__(self, Hom(E(0).parent(), F(0).parent()))
self._domain_curve = E
self._codomain_curve = F
return
        if E is None:  # easy case in reverse
baseWI.__init__(self,*urst)
inv_urst=baseWI.__invert__(self)
E=EllipticCurve(baseWI.__call__(inv_urst,list(F.a_invariants())))
Morphism.__init__(self, Hom(E(0).parent(), F(0).parent()))
self._domain_curve = E
self._codomain_curve = F
return
        if urst is None:  # try to construct the morphism
            urst = isomorphisms(E, F, True)
            if urst is None:
raise ValueError("Elliptic curves not isomorphic.")
baseWI.__init__(self, *urst)
Morphism.__init__(self, Hom(E(0).parent(), F(0).parent()))
self._domain_curve = E
self._codomain_curve = F
return
# none of the parameters is None:
baseWI.__init__(self,*urst)
if F!=EllipticCurve(baseWI.__call__(self,list(E.a_invariants()))):
raise ValueError("second argument is not an isomorphism from first argument to third argument")
else:
Morphism.__init__(self, Hom(E(0).parent(), F(0).parent()))
self._domain_curve = E
self._codomain_curve = F
return
def __cmp__(self, other):
r"""
Standard comparison function for the WeierstrassIsomorphism class.
EXAMPLE::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: E=EllipticCurve('389a1')
sage: F=E.change_weierstrass_model(1,2,3,4)
sage: w1=E.isomorphism_to(F)
sage: w1==w1
True
sage: w2 = F.automorphisms()[0] *w1
sage: w1==w2
False
::
sage: E=EllipticCurve_from_j(GF(7)(0))
sage: F=E.change_weierstrass_model(2,3,4,5)
sage: a=E.isomorphisms(F)
sage: b=[w*a[0] for w in F.automorphisms()]
sage: b.sort()
sage: a==b
True
sage: c=[a[0]*w for w in E.automorphisms()]
sage: c.sort()
sage: a==c
True
"""
if not isinstance(other, WeierstrassIsomorphism):
return cmp(type(self), type(other))
t = cmp(self._domain_curve, other._domain_curve)
if t: return t
t = cmp(self._codomain_curve, other._codomain_curve)
if t: return t
return baseWI.__cmp__(self,other)
def __call__(self, P):
r"""
Call function for WeierstrassIsomorphism class.
INPUT:
- ``P`` (Point) -- a point on the domain curve.
OUTPUT:
(Point) the transformed point on the codomain curve.
EXAMPLES::
sage: from sage.schemes.elliptic_curves.weierstrass_morphism import *
sage: E=EllipticCurve('37a1')
sage: w=WeierstrassIsomorphism(E,(2,3,4,5))
sage: P=E(0,-1)
sage: w(P)
(-3/4 : 3/4 : 1)
sage: w(P).curve()==E.change_weierstrass_model((2,3,4,5))
True
"""
if P[2] == 0:
return self._codomain_curve(0)
else:
return self._codomain_curve.point(baseWI.__call__(self,tuple(P._coords)), check=False)
def __invert__(self):
r"""
Returns the inverse of this WeierstrassIsomorphism.
EXAMPLES::
sage: E = EllipticCurve('5077')
sage: F = E.change_weierstrass_model([2,3,4,5]); F
Elliptic Curve defined by y^2 + 4*x*y + 11/8*y = x^3 - 7/4*x^2 - 3/2*x - 9/32 over Rational Field
sage: w = E.isomorphism_to(F)
sage: P = E(-2,3,1)
sage: w(P)
(-5/4 : 9/4 : 1)
sage: ~w
Generic morphism:
From: Abelian group of points on Elliptic Curve defined by y^2 + 4*x*y + 11/8*y = x^3 - 7/4*x^2 - 3/2*x - 9/32 over Rational Field
To: Abelian group of points on Elliptic Curve defined by y^2 + y = x^3 - 7*x + 6 over Rational Field
Via: (u,r,s,t) = (1/2, -3/4, -2, 7/8)
sage: Q = w(P); Q
(-5/4 : 9/4 : 1)
sage: (~w)(Q)
(-2 : 3 : 1)
"""
winv=baseWI.__invert__(self).tuple()
return WeierstrassIsomorphism(self._codomain_curve, winv, self._domain_curve)
def __mul__(self,other):
r"""
        Returns the composition of this WeierstrassIsomorphism and the other.
        WeierstrassIsomorphisms can be composed using ``*`` if the
codomain & domain match: `(w1*w2)(X)=w1(w2(X))`, so we require
``w1.domain()==w2.codomain()``.
EXAMPLES::
sage: E1 = EllipticCurve('5077')
sage: E2 = E1.change_weierstrass_model([2,3,4,5])
sage: w1 = E1.isomorphism_to(E2)
sage: E3 = E2.change_weierstrass_model([6,7,8,9])
sage: w2 = E2.isomorphism_to(E3)
sage: P = E1(-2,3,1)
sage: (w2*w1)(P)==w2(w1(P))
True
"""
if self._domain_curve==other._codomain_curve:
w=baseWI.__mul__(self,other)
return WeierstrassIsomorphism(other._domain_curve, w.tuple(), self._codomain_curve)
else:
raise ValueError("Domain of first argument must equal codomain of second")
def __repr__(self):
r"""
Returns the string representation of this WeierstrassIsomorphism.
OUTPUT:
(string) The underlying morphism, together with an extra line
showing the `(u,r,s,t)` parameters.
EXAMPLES::
sage: E1 = EllipticCurve('5077')
sage: E2 = E1.change_weierstrass_model([2,3,4,5])
sage: E1.isomorphism_to(E2)
Generic morphism:
From: Abelian group of points on Elliptic Curve defined by y^2 + y = x^3 - 7*x + 6 over Rational Field
To: Abelian group of points on Elliptic Curve defined by y^2 + 4*x*y + 11/8*y = x^3 - 7/4*x^2 - 3/2*x - 9/32 over Rational Field
Via: (u,r,s,t) = (2, 3, 4, 5)
"""
return Morphism.__repr__(self)+"\n Via: (u,r,s,t) = "+baseWI.__repr__(self)
"""
Settings specific to prod-like deployable code, reading values from system environment variables.
"""
import os
from conf.configs import common
from conf.settings import PROJECT_ID
__author__ = "Alex Laird"
__copyright__ = "Copyright 2018, Helium Edu"
__version__ = "1.1.15"
# Define the base working directory of the application
BASE_DIR = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", ".."))
# Application definition
INSTALLED_APPS = common.INSTALLED_APPS
MIDDLEWARE = common.MIDDLEWARE + (
"rollbar.contrib.django.middleware.RollbarNotifierMiddleware",
)
TEMPLATES = common.TEMPLATES
if common.DEBUG:
TEMPLATES[0]["OPTIONS"]["context_processors"] += (
"django.template.context_processors.debug",
)
#############################
# Django configuration
#############################
# Security
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
# Logging
ROLLBAR = {
"access_token": os.environ.get("PLATFORM_ROLLBAR_POST_SERVER_ITEM_ACCESS_TOKEN"),
"environment": os.environ.get("ENVIRONMENT"),
"branch": "main",
"root": BASE_DIR,
}
if not common.DEBUG:
ADMINS = (
(common.PROJECT_NAME, common.ADMIN_EMAIL_ADDRESS),
)
MANAGERS = ADMINS
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S"
},
},
"filters": {
"require_debug_false": {
"()": "django.utils.log.RequireDebugFalse",
}
},
"handlers": {
"rollbar": {
"level": "WARN",
"class": "rollbar.logger.RollbarHandler",
"filters": ["require_debug_false"],
},
"django": {
"level": "ERROR",
"class": "logging.handlers.RotatingFileHandler",
"filename": f"/var/log/{PROJECT_ID}/django.log",
"maxBytes": 50000000,
"backupCount": 3,
"formatter": "standard",
},
f"{PROJECT_ID}_app": {
"level": "INFO",
"class": "logging.handlers.RotatingFileHandler",
"filename": f"/var/log/{PROJECT_ID}/app.log",
"maxBytes": 50000000,
"backupCount": 3,
"formatter": "standard",
},
},
"loggers": {
"django.request": {
"handlers": ["django", "rollbar"],
"level": "ERROR",
"propagate": False,
},
"{%PROJECT_ID_LOWER%}.app": {
"handlers": [f"{PROJECT_ID}_app", "rollbar"],
"level": "INFO",
},
}
}
# Cache
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": os.environ.get("{%PROJECT_ID_UPPER%}_REDIS_HOST"),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
},
}
# Database
DATABASES = {
"default": {
"NAME": os.environ.get("{%PROJECT_ID_UPPER%}_DB_NAME"),
"ENGINE": "django.db.backends.mysql",
"HOST": os.environ.get("{%PROJECT_ID_UPPER%}_DB_HOST"),
"USER": os.environ.get("{%PROJECT_ID_UPPER%}_DB_USER"),
"PASSWORD": os.environ.get("{%PROJECT_ID_UPPER%}_DB_PASSWORD"),
}
}
import operator
from functools import reduce
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required
from django.db.models import Q, Sum
from django.shortcuts import HttpResponse, get_object_or_404, redirect, render
from django.views.generic import View
from django.views.generic.base import TemplateView
from .forms import RecipeForm
from .models import (Purchase, Recipe, Subscription)
from .utils import paginator_data
User = get_user_model()
def index(request):
    '''View that renders the main page.'''
    # get the list of tags from the GET request
tags = request.GET.getlist('tag')
if tags:
        # filter by the combination of selected tags
query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags))
recipies = Recipe.objects.filter(query).order_by('-date_pub')
else:
recipies = Recipe.objects.all().order_by('-date_pub')
    # Since the paginator appears on almost every page, the shared pieces
    # were moved into a helper function in utils.py
page, paginator = paginator_data(request, recipies)
return render(request, 'index.html', context={'page': page,
'paginator': paginator,
'tags': tags})
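# A minimal sketch of what the ``paginator_data`` helper in utils.py might
# look like (hypothetical -- the real implementation lives in .utils):
#
# from django.core.paginator import Paginator
#
# def paginator_data(request, queryset, per_page=6):
#     paginator = Paginator(queryset, per_page)
#     page = paginator.get_page(request.GET.get('page'))
#     return page, paginator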
def recipe_detail(request, slug):
    '''View that renders a recipe detail page.'''
recipe = get_object_or_404(Recipe, slug__iexact=slug)
return render(request, 'recipe_detail.html', context={'recipe': recipe})
def profile_index(request, username):
    '''A user's personal page.'''
author = get_object_or_404(User, username=username)
user = request.user
tags = request.GET.getlist('tag')
if tags:
        # filter by the combination of selected tags
query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags))
recipies = author.recipes.filter(query).order_by('-date_pub')
else:
recipies = author.recipes.all().order_by('-date_pub')
    following = Subscription.objects.filter(user__username=user.username,
                                            author=author).count()
return render(request, 'profile.html', context={'recipies': recipies,
'author': author,
'user': user,
'following': following,
'tags': tags})
@login_required
def subscription_index(request):
    '''The user's subscriptions page.'''
follow_authors = User.objects.filter(
following__user=request.user).prefetch_related('recipes')
page, paginator = paginator_data(request, follow_authors)
return render(request, 'subscription_index.html',
context={'page': page, 'paginator': paginator, })
@login_required
def favorite_index(request):
    '''The user's favorites page.'''
tags = request.GET.getlist('tag')
if tags:
        # filter by the combination of selected tags
query = reduce(operator.or_, (Q(tags__contains=tag) for tag in tags))
recipies = Recipe.objects.filter(query).order_by('-date_pub').filter(
favorites__user=request.user).select_related('author')
else:
recipies = Recipe.objects.all().order_by('-date_pub').filter(
favorites__user=request.user).select_related('author')
page, paginator = paginator_data(request, recipies)
return render(request, 'favorite_index.html',
context={'page': page,
'paginator': paginator,
'tags': tags})
@login_required
def purchase_index(request):
    '''The purchase list.'''
recipies = Recipe.objects.filter(
purchases__user=request.user)
return render(request, 'purchase_index.html', context={
'recipies': recipies})
@login_required
def get_purchase_list(request):
    '''Generates a downloadable txt file with the ingredients of the selected recipes.'''
file_name = 'Purchase_list.txt'
txt = ''
purchase = Purchase.objects.filter(user=request.user)
    ingredients = purchase.values('recipe__ingredients__title',
                                  'recipe__ingredients__dimension').annotate(
        total_amount=Sum('recipe__ingredients__ingredient_recipe__amount'))
result = set()
for ingredient in ingredients:
if ingredient['recipe__ingredients__title'] not in result:
item = (f'{ingredient["recipe__ingredients__title"]} '
f'{ingredient["total_amount"]} '
f'{ingredient["recipe__ingredients__dimension"]}'
)
result.add(ingredient['recipe__ingredients__title'])
txt += item + '\n'
    response = HttpResponse(txt, content_type='text/plain; charset=utf-8')
response['Content-Disposition'] = f'attachment; filename={file_name}'
return response
class RecipeCreateUpdate(View):
    '''Create or edit a recipe.'''
def get(self, request, slug=None):
if slug:
            recipe = get_object_or_404(Recipe,
                                       author__username=request.user.username,
                                       slug__iexact=slug)
form = RecipeForm(instance=recipe)
title = 'Редактирование рецепта'
botton_name = 'Изменить рецепт'
context = {
'form': form,
'botton_name': botton_name,
'title': title,
'recipe': recipe,
}
else:
form = RecipeForm()
title = 'Создание рецепта'
botton_name = 'Создать рецепт'
context = {
'form': form,
'botton_name': botton_name,
'title': title
}
template = 'recipe_create_or_update.html'
return render(request, template, context)
def post(self, request, slug=None):
if slug:
            recipe = get_object_or_404(Recipe,
                                       author__username=request.user.username,
                                       slug__iexact=slug)
if request.user != recipe.author:
return redirect('index')
bound_form = RecipeForm(request.POST or None,
files=request.FILES or None,
instance=recipe,
initial={"request": request})
context = {
'form': bound_form,
'title': 'Редактирование рецепта',
'botton_name': 'Редактирование рецепта',
'recipe': recipe
}
else:
bound_form = RecipeForm(request.POST or None,
files=request.FILES or None,
initial={"request": request})
context = {
'form': bound_form,
'title': 'Создание рецепта',
'botton_name': 'Создать рецепт'
}
        if bound_form.is_valid():
            new_recipe = bound_form.save(commit=False)
            new_recipe.tags = request.POST.getlist('tags')
            new_recipe.save()
            return redirect(new_recipe)
return render(request, 'recipe_create_or_update.html',
context=context)
class RecipeDelete(View):
    '''Delete a recipe.'''
def get(self, request, pk):
recipe = get_object_or_404(Recipe, author=request.user, id=pk)
recipe.delete()
return redirect('index')
class About(TemplateView):
    '''About the author.'''
template_name = 'about.html'
class Technologies(TemplateView):
    '''Technologies.'''
template_name = 'technologies.html'
import pandas as pd
import numpy as np
import torch
from sklearn.model_selection import train_test_split
from backend.services.toxic_comment_jigsaw.application.ai.model import BERTClassifier
from backend.services.toxic_comment_jigsaw.application.ai.training.src.dataset import BERTDataset
from backend.services.toxic_comment_jigsaw.application.ai.training.src.preprocess import Preprocess
from backend.services.toxic_comment_jigsaw.application.ai.training.src.engine import Engine
from backend.services.toxic_comment_jigsaw.application.ai.settings import Settings
from transformers import AdamW, get_linear_schedule_with_warmup
from torch.utils.data import DataLoader
class Train:
def __init__(self):
# initialize required class
self.settings = Settings
self.engine = Engine()
self.preprocess = Preprocess()
# initialize required variables
self.bert_classifier = None
self.optimizer = None
self.scheduler = None
self.train_data_loader = None
self.val_data_loader = None
self.total_steps = None
self.best_accuracy = 0
def __initialize(self):
# Instantiate Bert Classifier
self.bert_classifier = BERTClassifier(freeze_bert=False)
self.bert_classifier.to(self.settings.DEVICE)
# Create the optimizer
self.optimizer = AdamW(self.bert_classifier.parameters(),
lr=5e-5, # Default learning rate
eps=1e-8 # Default epsilon value
)
# Set up the learning rate scheduler
self.scheduler = get_linear_schedule_with_warmup(self.optimizer,
num_warmup_steps=0, # Default value
num_training_steps=self.total_steps)
    def create_data_loaders(self, dataset):
        pass
def load_data(self):
train_df = pd.read_csv(self.settings.TRAIN_DATA).fillna("none")
train_df['comment_text'] = train_df['comment_text'].apply(lambda x: self.preprocess.clean_text(x))
X = list(train_df['comment_text'])
y = np.array(train_df.loc[:, 'toxic':])
X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.20, random_state=self.settings.RANDOM_STATE)
# training dataset
train_dataset = BERTDataset(X_train, y_train)
# validation dataset
val_dataset = BERTDataset(X_val, y_val)
self.train_data_loader = DataLoader(train_dataset,
batch_size=self.settings.TRAIN_BATCH_SIZE,
shuffle=True,
num_workers=self.settings.TRAIN_NUM_WORKERS)
self.val_data_loader = DataLoader(val_dataset,
batch_size=self.settings.VALID_BATCH_SIZE,
shuffle=True,
num_workers=self.settings.VAL_NUM_WORKERS)
self.total_steps = int(len(X_train) / self.settings.TRAIN_BATCH_SIZE * self.settings.EPOCHS)
def train(self):
        for epoch in range(self.settings.EPOCHS):
# calling the training function in engine.py file
self.engine.train_fn(data_loader=self.train_data_loader,
model=self.bert_classifier,
optimizer=self.optimizer,
device=self.settings.DEVICE,
schedular=self.scheduler)
# calling the evaluation function from the engine.py file to compute evaluation
val_loss, val_accuracy = self.engine.eval_fn(data_loader=self.val_data_loader,
model=self.bert_classifier,
device=self.settings.DEVICE)
# updating the accuracy
if val_accuracy > self.best_accuracy:
torch.save(self.bert_classifier.state_dict(), self.settings.MODEL_PATH)
self.best_accuracy = val_accuracy
def run(self):
try:
print("Loading and Preparing the Dataset-----!! ")
self.load_data()
print("Dataset Successfully Loaded and Prepared-----!! ")
print()
print("-" * 70)
print("Loading and Initializing the Bert Model -----!! ")
self.__initialize()
print("Model Successfully Loaded and Initialized-----!! ")
print()
print("-" * 70)
print("------------------Starting Training-----------!!")
self.engine.set_seed()
self.train()
print("Training complete-----!!!")
        except Exception as ex:
print("Following Exception Occurred---!! ", str(ex))
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Guidelines for writing new hacking checks
- Use only for Rally specific tests. OpenStack general tests
should be submitted to the common 'hacking' module.
- Pick numbers in the range N3xx. Find the current test with
the highest allocated number and then pick the next value.
- Keep the test method code in the source file ordered based
on the N3xx value.
- List the new rule in the top level HACKING.rst file
- Add test cases for each new rule to tests/unit/test_hacking.py
"""
import functools
import re
import tokenize
re_assert_equal_end_with_true_or_false = re.compile(
r"assertEqual\(.*?, \s+(True|False)\)$")
re_assert_equal_start_with_true_or_false = re.compile(
r"assertEqual\((True|False),")
re_assert_true_instance = re.compile(
r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, "
r"(\w|\.|\'|\"|\[|\])+\)\)")
re_assert_equal_type = re.compile(
r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), "
r"(\w|\.|\'|\"|\[|\])+\)")
re_assert_equal_end_with_none = re.compile(r"assertEqual\(.*?,\s+None\)$")
re_assert_equal_start_with_none = re.compile(r"assertEqual\(None,")
re_assert_not_equal_end_with_none = re.compile(
r"assertNotEqual\(.*?,\s+None\)$")
re_assert_not_equal_start_with_none = re.compile(r"assertNotEqual\(None,")
re_assert_true_false_with_in_or_not_in = re.compile(
r"assert(True|False)\("
r"(\w|[][.'\"])+( not)? in (\w|[][.'\",])+(, .*)?\)")
re_assert_true_false_with_in_or_not_in_spaces = re.compile(
r"assert(True|False)\((\w|[][.'\"])+( not)? in [\[|'|\"](\w|[][.'\", ])+"
r"[\[|'|\"](, .*)?\)")
re_assert_equal_in_end_with_true_or_false = re.compile(
r"assertEqual\((\w|[][.'\"])+( not)? in (\w|[][.'\", ])+, (True|False)\)")
re_assert_equal_in_start_with_true_or_false = re.compile(
r"assertEqual\((True|False), (\w|[][.'\"])+( not)? in (\w|[][.'\", ])+\)")
re_no_construct_dict = re.compile(
r"\sdict\(\)")
re_no_construct_list = re.compile(
r"\slist\(\)")
re_str_format = re.compile(r"""
% # start of specifier
\(([^)]+)\) # mapping key, in group 1
[#0 +\-]? # optional conversion flag
(?:-?\d*)? # optional minimum field width
(?:\.\d*)? # optional precision
[hLl]? # optional length modifier
[A-z%] # conversion modifier
""", re.X)
re_raises = re.compile(
r"\s:raise[^s] *.*$|\s:raises *:.*$|\s:raises *[^:]+$")
re_db_import = re.compile(r"^from rally.common import db")
re_objects_import = re.compile(r"^from rally.common import objects")
re_old_type_class = re.compile(r"^\s*class \w+(\(\))?:")
re_datetime_alias = re.compile(r"^(from|import) datetime(?!\s+as\s+dt$)")
re_log_warn = re.compile(r"(.)*LOG\.(warn)\(\s*('|\"|_)")
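# For example, re_assert_true_instance matches a line like
#     self.assertTrue(isinstance(result, dict))
# and re_log_warn matches
#     LOG.warn("message")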
def skip_ignored_lines(func):
    @functools.wraps(func)
    def wrapper(logical_line, physical_line, filename):
        line = physical_line.strip()
        if not line or line.startswith("#") or line.endswith("# noqa"):
            return
        # Re-yield only the first finding of the wrapped check; guard against
        # an empty generator, where next() would raise StopIteration and, under
        # PEP 479, turn into a RuntimeError inside this generator.
        try:
            yield next(func(logical_line, physical_line, filename))
        except StopIteration:
            return
    return wrapper
def _parse_assert_mock_str(line):
point = line.find(".assert_")
if point == -1:
point = line.find(".called_once_with(")
if point != -1:
end_pos = line[point:].find("(") + point
return point, line[point + 1: end_pos], line[: point]
else:
return None, None, None
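# For example (hypothetical input):
#     _parse_assert_mock_str("mock_obj.assert_called(x)")
# returns (8, "assert_called", "mock_obj"): the position of the dot, the
# method name, and the object the method is called on.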
@skip_ignored_lines
def check_assert_methods_from_mock(logical_line, physical_line, filename):
"""Ensure that ``assert_*`` methods from ``mock`` library is used correctly
N301 - base error number
N302 - related to nonexistent "assert_called"
N303 - related to nonexistent "assert_called_once"
N304 - related to nonexistent "called_once_with"
"""
correct_names = ["assert_any_call", "assert_called_once_with",
"assert_called_with", "assert_has_calls",
"assert_not_called"]
ignored_files = ["./tests/unit/test_hacking.py"]
if filename.startswith("./tests") and filename not in ignored_files:
pos, method_name, obj_name = _parse_assert_mock_str(logical_line)
if pos:
if method_name not in correct_names:
error_number = "N301"
msg = ("%(error_number)s:'%(method)s' is not present in `mock`"
" library. %(custom_msg)s For more details, visit "
"http://www.voidspace.org.uk/python/mock/ .")
if method_name == "assert_called":
error_number = "N302"
custom_msg = ("Maybe, you should try to use "
"'assertTrue(%s.called)' instead." %
obj_name)
elif method_name == "assert_called_once":
# For more details, see a bug in Rally:
# https://bugs.launchpad.net/rally/+bug/1305991
error_number = "N303"
custom_msg = ("Maybe, you should try to use "
"'assertEqual(1, %s.call_count)' "
"or '%s.assert_called_once_with()'"
" instead." % (obj_name, obj_name))
elif method_name == "called_once_with":
error_number = "N304"
custom_msg = ("Maybe, you should try to use "
"'%s.assert_called_once_with()'"
" instead." % obj_name)
else:
custom_msg = ("Correct 'assert_*' methods: '%s'."
% "', '".join(correct_names))
yield (pos, msg % {
"error_number": error_number,
"method": method_name,
"custom_msg": custom_msg})
@skip_ignored_lines
def check_import_of_logging(logical_line, physical_line, filename):
"""Check correctness import of logging module
N310
"""
excluded_files = ["./rally/common/logging.py",
"./tests/unit/test_logging.py",
"./tests/ci/rally_verify.py",
"./tests/ci/sync_requirements.py"]
forbidden_imports = ["from oslo_log",
"import oslo_log",
"import logging"]
if filename not in excluded_files:
for forbidden_import in forbidden_imports:
if logical_line.startswith(forbidden_import):
yield (0, "N310 Wrong module for logging is imported. Please "
"use `rally.common.logging` instead.")
@skip_ignored_lines
def check_import_of_config(logical_line, physical_line, filename):
"""Check correctness import of config module
N311
"""
excluded_files = ["./rally/common/cfg.py"]
forbidden_imports = ["from oslo_config",
"import oslo_config"]
if filename not in excluded_files:
for forbidden_import in forbidden_imports:
if logical_line.startswith(forbidden_import):
yield (0, "N311 Wrong module for config is imported. Please "
"use `rally.common.cfg` instead.")
@skip_ignored_lines
def no_use_conf_debug_check(logical_line, physical_line, filename):
"""Check for "cfg.CONF.debug"
Rally has two DEBUG level:
- Full DEBUG, which include all debug-messages from all OpenStack services
- Rally DEBUG, which include only Rally debug-messages
so we should use custom check to know debug-mode, instead of CONF.debug
N312
"""
excluded_files = ["./rally/common/logging.py"]
point = logical_line.find("CONF.debug")
if point != -1 and filename not in excluded_files:
        yield (point, "N312 Don't use `CONF.debug`. "
                      "Function `rally.common.logging.is_debug` "
                      "should be used instead.")
@skip_ignored_lines
def assert_true_instance(logical_line, physical_line, filename):
"""Check for assertTrue(isinstance(a, b)) sentences
N320
"""
if re_assert_true_instance.match(logical_line):
yield (0, "N320 assertTrue(isinstance(a, b)) sentences not allowed, "
"you should use assertIsInstance(a, b) instead.")
@skip_ignored_lines
def assert_equal_type(logical_line, physical_line, filename):
"""Check for assertEqual(type(A), B) sentences
N321
"""
if re_assert_equal_type.match(logical_line):
yield (0, "N321 assertEqual(type(A), B) sentences not allowed, "
"you should use assertIsInstance(a, b) instead.")
@skip_ignored_lines
def assert_equal_none(logical_line, physical_line, filename):
"""Check for assertEqual(A, None) or assertEqual(None, A) sentences
N322
"""
res = (re_assert_equal_start_with_none.search(logical_line) or
re_assert_equal_end_with_none.search(logical_line))
if res:
yield (0, "N322 assertEqual(A, None) or assertEqual(None, A) "
"sentences not allowed, you should use assertIsNone(A) "
"instead.")
@skip_ignored_lines
def assert_true_or_false_with_in(logical_line, physical_line, filename):
"""Check assertTrue/False(A in/not in B) with collection contents
Check for assertTrue/False(A in B), assertTrue/False(A not in B),
assertTrue/False(A in B, message) or assertTrue/False(A not in B, message)
sentences.
N323
"""
res = (re_assert_true_false_with_in_or_not_in.search(logical_line) or
re_assert_true_false_with_in_or_not_in_spaces.search(logical_line))
if res:
yield (0, "N323 assertTrue/assertFalse(A in/not in B)sentences not "
"allowed, you should use assertIn(A, B) or assertNotIn(A, B)"
" instead.")
@skip_ignored_lines
def assert_equal_in(logical_line, physical_line, filename):
"""Check assertEqual(A in/not in B, True/False) with collection contents
Check for assertEqual(A in B, True/False), assertEqual(True/False, A in B),
assertEqual(A not in B, True/False) or assertEqual(True/False, A not in B)
sentences.
N324
"""
res = (re_assert_equal_in_end_with_true_or_false.search(logical_line) or
re_assert_equal_in_start_with_true_or_false.search(logical_line))
if res:
yield (0, "N324: Use assertIn/NotIn(A, B) rather than "
"assertEqual(A in/not in B, True/False) when checking "
"collection contents.")
@skip_ignored_lines
def assert_not_equal_none(logical_line, physical_line, filename):
"""Check for assertNotEqual(A, None) or assertEqual(None, A) sentences
N325
"""
res = (re_assert_not_equal_start_with_none.search(logical_line) or
re_assert_not_equal_end_with_none.search(logical_line))
if res:
yield (0, "N325 assertNotEqual(A, None) or assertNotEqual(None, A) "
"sentences not allowed, you should use assertIsNotNone(A) "
"instead.")
@skip_ignored_lines
def assert_equal_true_or_false(logical_line, physical_line, filename):
"""Check for assertEqual(A, True/False) sentences
Check for assertEqual(A, True/False) sentences or
assertEqual(True/False, A)
N326
"""
res = (re_assert_equal_end_with_true_or_false.search(logical_line) or
re_assert_equal_start_with_true_or_false.search(logical_line))
if res:
yield (0, "N326 assertEqual(A, True/False) or "
"assertEqual(True/False, A) sentences not allowed,"
"you should use assertTrue(A) or assertFalse(A) instead.")
@skip_ignored_lines
def check_no_direct_rally_objects_import(logical_line, physical_line,
filename):
"""Check if rally.common.objects are properly imported.
If you import "from rally.common import objects" you are able to use
objects directly like objects.Task.
N340
"""
if filename == "./rally/common/objects/__init__.py":
return
if filename == "./rally/common/objects/endpoint.py":
return
if (logical_line.startswith("from rally.common.objects")
or logical_line.startswith("import rally.common.objects.")):
yield (0, "N340: Import objects module:"
"`from rally.common import objects`. "
"After that you can use directly objects e.g. objects.Task")
@skip_ignored_lines
def check_no_oslo_deprecated_import(logical_line, physical_line, filename):
"""Check if oslo.foo packages are not imported instead of oslo_foo ones.
Libraries from oslo.foo namespace are deprecated because of namespace
problems.
N341
"""
if (logical_line.startswith("from oslo.")
or logical_line.startswith("import oslo.")):
yield (0, "N341: Import oslo module: `from oslo_xyz import ...`. "
"The oslo.xyz namespace was deprecated, use oslo_xyz "
"instead")
@skip_ignored_lines
def check_quotes(logical_line, physical_line, filename):
"""Check that single quotation marks are not used
N350
"""
in_string = False
in_multiline_string = False
    single_quotes_are_used = False
check_tripple = (
lambda line, i, char: (
i + 2 < len(line) and
(char == line[i] == line[i + 1] == line[i + 2])
)
)
i = 0
while i < len(logical_line):
char = logical_line[i]
if in_string:
if char == "\"":
in_string = False
if char == "\\":
i += 1 # ignore next char
elif in_multiline_string:
if check_tripple(logical_line, i, "\""):
i += 2 # skip next 2 chars
in_multiline_string = False
elif char == "#":
break
elif char == "'":
            single_quotes_are_used = True
break
elif char == "\"":
if check_tripple(logical_line, i, "\""):
in_multiline_string = True
i += 3
continue
in_string = True
i += 1
    if single_quotes_are_used:
        yield (i, "N350 Remove single quotes")
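# For example, `x = 'bad'` yields N350 while `x = "ok"` does not; single
# quotes inside a """...""" string or after a '#' comment marker are ignored.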
@skip_ignored_lines
def check_no_constructor_data_struct(logical_line, physical_line, filename):
"""Check that data structs (lists, dicts) are declared using literals
N351
"""
match = re_no_construct_dict.search(logical_line)
if match:
yield (0, "N351 Remove dict() construct and use literal {}")
match = re_no_construct_list.search(logical_line)
if match:
yield (0, "N351 Remove list() construct and use literal []")
def check_dict_formatting_in_string(logical_line, tokens):
"""Check that strings do not use dict-formatting with a single replacement
N352
"""
# NOTE(stpierre): Can't use @skip_ignored_lines here because it's
# a stupid decorator that only works on functions that take
# (logical_line, filename) as arguments.
if (not logical_line or
logical_line.startswith("#") or
logical_line.endswith("# noqa")):
return
current_string = ""
in_string = False
for token_type, text, start, end, line in tokens:
if token_type == tokenize.STRING:
if not in_string:
current_string = ""
in_string = True
current_string += text.strip("\"")
elif token_type == tokenize.OP:
if not current_string:
continue
# NOTE(stpierre): The string formatting operator % has
# lower precedence than +, so we assume that the logical
# string has concluded whenever we hit an operator of any
# sort. (Most operators don't work for strings anyway.)
# Some string operators do have higher precedence than %,
# though, so you can technically trick this check by doing
# things like:
#
# "%(foo)s" * 1 % {"foo": 1}
# "%(foo)s"[:] % {"foo": 1}
#
# It also will produce false positives if you use explicit
# parenthesized addition for two strings instead of
# concatenation by juxtaposition, e.g.:
#
# ("%(foo)s" + "%(bar)s") % vals
#
# But if you do any of those things, then you deserve all
# of the horrible things that happen to you, and probably
# many more.
in_string = False
if text == "%":
format_keys = set()
for match in re_str_format.finditer(current_string):
format_keys.add(match.group(1))
if len(format_keys) == 1:
                    yield (0,
                           "N352 Do not use mapping key string formatting "
                           "with a single key")
if text != ")":
# NOTE(stpierre): You can have a parenthesized string
# followed by %, so a closing paren doesn't obviate
# the possibility for a substitution operator like
# every other operator does.
current_string = ""
elif token_type in (tokenize.NL, tokenize.COMMENT):
continue
else:
in_string = False
if token_type == tokenize.NEWLINE:
current_string = ""
@skip_ignored_lines
def check_using_unicode(logical_line, physical_line, filename):
"""Check crosspython unicode usage
N353
"""
if re.search(r"\bunicode\(", logical_line):
yield (0, "N353 'unicode' function is absent in python3. Please "
"use 'six.text_type' instead.")
def check_raises(physical_line, filename):
"""Check raises usage
N354
"""
ignored_files = ["./tests/unit/test_hacking.py",
"./tests/hacking/checks.py"]
if filename not in ignored_files:
if re_raises.search(physical_line):
return (0, "N354 ':Please use ':raises Exception: conditions' "
"in docstrings.")
@skip_ignored_lines
def check_old_type_class(logical_line, physical_line, filename):
"""Use new-style Python classes
N355
"""
if re_old_type_class.search(logical_line):
yield (0, "N355 This class does not inherit from anything and thus "
"will be an old-style class by default. Try to inherit from "
"``object`` or another new-style class.")
@skip_ignored_lines
def check_datetime_alias(logical_line, physical_line, filename):
"""Ensure using ``dt`` as alias for ``datetime``
N356
"""
if re_datetime_alias.search(logical_line):
yield (0, "N356 Please use ``dt`` as alias for ``datetime``.")
@skip_ignored_lines
def check_no_six_iteritems(logical_line, physical_line, filename):
"""Check no six.iteritems
N357
"""
if re.search(r"\six.iteritems\(\)", logical_line):
yield (0, "N357 Use dict.items() instead of six.iteritems()")
@skip_ignored_lines
def check_db_imports_in_cli(logical_line, physical_line, filename):
"""Ensure that CLI modules do not use ``rally.common.db``
N360
"""
if (not filename.startswith("./rally/cli")
or filename == "./rally/cli/commands/db.py"):
return
if re_db_import.search(logical_line):
yield (0, "N360 CLI modules do not allow to work with "
"`rally.common.db``.")
@skip_ignored_lines
def check_objects_imports_in_cli(logical_line, physical_line, filename):
"""Ensure that CLI modules do not use ``rally.common.objects``
N361
"""
if not filename.startswith("./rally/cli"):
return
if re_objects_import.search(logical_line):
yield (0, "N361 CLI modules do not allow to work with "
"`rally.common.objects``.")
@skip_ignored_lines
def check_log_warn(logical_line, physical_line, filename):
    """Check that LOG.warn is not used
    N313
    """
    if re_log_warn.search(logical_line):
        yield (0, "N313 LOG.warn is deprecated, please use LOG.warning")
@skip_ignored_lines
def check_opts_import_path(logical_line, physical_line, filename):
"""Ensure that we load opts from correct paths only
N342
"""
excluded_files = ["./rally_openstack/__init__.py"]
forbidden_methods = [".register_opts("]
if filename not in excluded_files:
for forbidden_method in forbidden_methods:
if logical_line.find(forbidden_method) != -1:
yield (0, "N342 All options should be loaded from correct "
"paths only - rally_openstack.cfg module.")
def factory(register):
register(check_assert_methods_from_mock)
register(check_import_of_logging)
register(check_import_of_config)
register(no_use_conf_debug_check)
register(assert_true_instance)
register(assert_equal_type)
register(assert_equal_none)
register(assert_true_or_false_with_in)
register(assert_equal_in)
register(assert_equal_true_or_false)
register(check_no_direct_rally_objects_import)
register(check_no_oslo_deprecated_import)
register(check_quotes)
register(check_no_constructor_data_struct)
register(check_dict_formatting_in_string)
register(check_using_unicode)
register(check_raises)
register(check_datetime_alias)
register(check_db_imports_in_cli)
register(check_objects_imports_in_cli)
register(check_old_type_class)
register(check_no_six_iteritems)
register(check_log_warn)
register(check_opts_import_path)
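# These checks are typically wired into flake8 through hacking's local check
# factory, e.g. in tox.ini (section and option name per the hacking docs):
#     [hacking]
#     local-check-factory = tests.hacking.checks.factory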
| [
[
[
1052,
1061
],
[
3265,
3274
]
],
[
[
1069,
1071
],
[
1131,
1133
],
[
1231,
1233
],
[
1304,
1306
],
[
1430,
1432
],
[
1560,
1562
],
[
1637,
1639
],
[
1707,
1709
],
[
1796,
1798
],
[
1874,
1876
],
[
2020,
2022
],
[
2181,
2183
],
[
2318,
2320
],
[
2432,
2434
],
[
2486,
2488
],
[
2533,
2535
],
[
2820,
2822
],
[
2838,
2840
],
[
2925,
2927
],
[
2989,
2991
],
[
3058,
3060
],
[
3115,
3117
],
[
3183,
3185
],
[
18150,
18152
],
[
19533,
19535
]
],
[
[
1079,
1087
],
[
15815,
15823
],
[
16005,
16013
],
[
17819,
17827
],
[
17832,
17840
],
[
17945,
17953
]
],
[
[
1090,
1128
],
[
11805,
11843
]
],
[
[
1188,
1228
],
[
11879,
11919
]
],
[
[
1278,
1301
],
[
8602,
8625
]
],
[
[
1407,
1427
],
[
8953,
8973
]
],
[
[
1528,
1557
],
[
9388,
9417
]
],
[
[
1603,
1634
],
[
9321,
9352
]
],
[
[
1671,
1704
],
[
11290,
11323
]
],
[
[
1758,
1793
],
[
11219,
11254
]
],
[
[
1833,
1871
],
[
9987,
10025
]
],
[
[
1972,
2017
],
[
10061,
10106
]
],
[
[
2137,
2178
],
[
10692,
10733
]
],
[
[
2272,
2315
],
[
10769,
10812
]
],
[
[
2409,
2429
],
[
14957,
14977
]
],
[
[
2463,
2483
],
[
15094,
15114
]
],
[
[
2517,
2530
],
[
17189,
17202
]
],
[
[
2826,
2835
],
[
18557,
18566
]
],
[
[
2910,
2922
],
[
19948,
19960
]
],
[
[
2969,
2986
],
[
20337,
20354
]
],
[
[
3038,
3055
],
[
18856,
18873
]
],
[
[
3095,
3112
],
[
19278,
19295
]
],
[
[
3169,
3180
],
[
20575,
20586
]
],
[
[
3233,
3251
],
[
3871,
3889
],
[
6349,
6367
],
[
7145,
7163
],
[
7730,
7748
],
[
8437,
8455
],
[
8796,
8814
],
[
9139,
9157
],
[
9631,
9649
],
[
10332,
10350
],
[
11030,
11048
],
[
11546,
11564
],
[
12160,
12178
],
[
12956,
12974
],
[
13513,
13531
],
[
14756,
14774
],
[
18002,
18020
],
[
18710,
18728
],
[
19116,
19134
],
[
19392,
19410
],
[
19654,
19672
],
[
20091,
20109
],
[
20490,
20508
],
[
20684,
20702
]
],
[
[
3561,
3583
],
[
4578,
4600
]
],
[
[
3894,
3924
],
[
21278,
21308
]
],
[
[
6372,
6395
],
[
21323,
21346
]
],
[
[
7168,
7190
],
[
21361,
21383
]
],
[
[
7753,
7776
],
[
21398,
21421
]
],
[
[
8460,
8480
],
[
21436,
21456
]
],
[
[
8819,
8836
],
[
21471,
21488
]
],
[
[
9162,
9179
],
[
21503,
21520
]
],
[
[
9654,
9682
],
[
21535,
21563
]
],
[
[
10355,
10370
],
[
21578,
21593
]
],
[
[
11053,
11074
]
],
[
[
11569,
11595
],
[
21608,
21634
]
],
[
[
12183,
12219
],
[
21649,
21685
]
],
[
[
12979,
13010
],
[
21700,
21731
]
],
[
[
13536,
13548
],
[
21746,
21758
]
],
[
[
14779,
14811
],
[
21773,
21805
]
],
[
[
15225,
15256
],
[
21820,
21851
]
],
[
[
18025,
18044
],
[
21866,
21885
]
],
[
[
18321,
18333
],
[
21900,
21912
]
],
[
[
18733,
18753
],
[
22043,
22063
]
],
[
[
19139,
19159
],
[
21927,
21947
]
],
[
[
19415,
19437
],
[
22078,
22100
]
],
[
[
19677,
19700
],
[
21962,
21985
]
],
[
[
20114,
20142
],
[
22000,
22028
]
],
[
[
20513,
20527
],
[
22115,
22129
]
],
[
[
20707,
20729
],
[
22144,
22166
]
],
[
[
21246,
21253
]
]
] |
from setuptools import setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
setup(
name='carafe_layer_cuda',
ext_modules=[
CUDAExtension('carafe_layer_cuda', [
'src/carafe_layer_cuda.cpp',
'src/carafe_layer_kernel.cu',
])
],
cmdclass={
'build_ext': BuildExtension
})
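# Typical build/installation commands for this extension (assume PyTorch and a
# matching CUDA toolkit are installed):
#     python setup.py install
# or, for an in-place development build:
#     python setup.py build_ext --inplace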
| [
[
[
23,
28
],
[
98,
103
]
],
[
[
67,
81
],
[
335,
349
]
],
[
[
83,
96
],
[
161,
174
]
]
] |
import numpy as np
# from numba import jit
# @jit
def detrending_coeff(win_len, order):
    # win_len = 51
    # order = 2
    n = (win_len - 1) // 2  # half-window size; integer division keeps it integral
    A = np.mat(np.ones((win_len, order + 1)))  # Vandermonde-style design matrix
    x = np.arange(-n, n + 1)
    for j in range(0, order + 1):
        A[:, j] = np.mat(x ** j).T
    # least-squares projection: coefficients = (A^T A)^{-1} A^T y for a window y
    coeff_output = (A.T * A).I * A.T
    return coeff_output, A
# coeff_output,A = detrending_coeff(5,2)
# print(coeff_output)
# print(A)
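# A quick sanity check of the least-squares property (a sketch using the
# function above): A * (coeff_output * y) reproduces any polynomial of degree
# <= order sampled on the window, e.g. the quadratic y = x**2 on 5 points:
# coeff_output, A = detrending_coeff(5, 2)
# y = np.mat(np.arange(-2, 3) ** 2).T
# print(np.allclose(A * (coeff_output * y), y))  # -> True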
| [
[
[
18,
19
],
[
172,
175
],
[
176,
180
],
[
272,
275
]
],
[
[
28,
39
],
[
207,
209
]
],
[
[
82,
98
]
]
] |
# Author: Javad Amirian
# Email: amiryan.j@gmail.com
import xml.etree.ElementTree as et
import numpy as np
import pandas as pd
from opentraj.toolkit.core.trajdataset import TrajDataset
from opentraj.toolkit.utils.calibration.camera_calibration_tsai import (
    CameraParameters, CalibrationConstants, image_coord_to_world_coord)
def load_pets(path, **kwargs):
"""
:param path: address of annotation file
:param kwargs:
:param calib_path: address of calibration file
    :return: TrajDataset object
"""
traj_dataset = TrajDataset()
annot_xtree = et.parse(path)
annot_xroot = annot_xtree.getroot() # dataset
cp, cc = None, None # calibration parameters
# load calibration
calib_path = kwargs.get('calib_path', "")
if calib_path:
cp = CameraParameters()
cc = CalibrationConstants()
calib_xtree = et.parse(calib_path)
calib_xroot = calib_xtree.getroot() # Camera
geometry_node = calib_xroot.find("Geometry")
width = int(geometry_node.attrib["width"])
height = int(geometry_node.attrib["height"])
cp.Ncx = float(geometry_node.attrib["ncx"])
cp.Nfx = float(geometry_node.attrib["nfx"])
cp.dx = float(geometry_node.attrib["dx"])
cp.dy = float(geometry_node.attrib["dy"])
cp.dpx = float(geometry_node.attrib["dpx"])
cp.dpy = float(geometry_node.attrib["dpy"])
intrinsic_node = calib_xroot.find("Intrinsic")
cc.f = float(intrinsic_node.attrib["focal"])
cc.kappa1 = float(intrinsic_node.attrib["kappa1"]) # 1st order radial distortion
cp.Cx = float(intrinsic_node.attrib["cx"])
cp.Cy = float(intrinsic_node.attrib["cy"])
cp.sx = float(intrinsic_node.attrib["sx"])
extrinsic_node = calib_xroot.find("Extrinsic")
cc.Tx = float(extrinsic_node.attrib["tx"])
cc.Ty = float(extrinsic_node.attrib["ty"])
cc.Tz = float(extrinsic_node.attrib["tz"])
cc.Rx = float(extrinsic_node.attrib["rx"])
cc.Ry = float(extrinsic_node.attrib["ry"])
cc.Rz = float(extrinsic_node.attrib["rz"])
cc.calc_rr() # Calculate Rotation Matrix
loaded_data = [] # frame_id, agent_id, pos_x, pos_y, xc, yc, h, w
for frame_node in annot_xroot:
objectlist_node = frame_node.find("objectlist") # .text
object_nodes = objectlist_node.findall("object")
frame_id = int(frame_node.attrib.get("number"))
for obj_node in object_nodes:
agent_id = obj_node.attrib["id"]
box_node = obj_node.find("box")
xc = float(box_node.attrib["xc"])
yc = float(box_node.attrib["yc"])
h = float(box_node.attrib["h"])
w = float(box_node.attrib["w"])
x_ground = xc
y_ground = yc + h/2
if cp:
pos_x, pos_y = image_coord_to_world_coord(x_ground, y_ground, 0, cp, cc)
else:
pos_x, pos_y = np.nan, np.nan
loaded_data.append([frame_id, agent_id, pos_x / 1000., pos_y / 1000., xc, yc, h, w])
data_columns = ["frame_id", "agent_id", "pos_x", "pos_y",
"xc", "yc", "h", "w"]
raw_dataset = pd.DataFrame(np.array(loaded_data), columns=data_columns)
traj_dataset.title = kwargs.get('title', "PETS")
# copy columns
traj_dataset.data[["frame_id", "agent_id",
"pos_x", "pos_y"]] = \
raw_dataset[["frame_id", "agent_id",
"pos_x", "pos_y"]]
traj_dataset.data["scene_id"] = kwargs.get('scene_id', 0)
traj_dataset.data["label"] = "pedestrian"
# post-process
fps = kwargs.get('fps', 7)
sampling_rate = kwargs.get('sampling_rate', 1)
use_kalman = kwargs.get('use_kalman', False)
traj_dataset.postprocess(fps=fps, sampling_rate=sampling_rate, use_kalman=use_kalman)
return traj_dataset
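# A minimal usage sketch (the file paths here are hypothetical):
# dataset = load_pets("PETS2009/S2L1/annotations.xml",
#                     calib_path="PETS2009/calibration/View_001.xml",
#                     scene_id=1, use_kalman=False)
# print(dataset.data[["frame_id", "agent_id", "pos_x", "pos_y"]].head())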
| [
[
[
61,
88
],
[
515,
517
],
[
812,
814
]
],
[
[
97,
108
],
[
2931,
2933
],
[
2939,
2941
],
[
3180,
3182
]
],
[
[
116,
128
],
[
3167,
3169
]
],
[
[
176,
187
],
[
482,
493
]
],
[
[
259,
260
],
[
734,
750
],
[
766,
786
],
[
2824,
2850
]
],
[
[
267,
276
]
]
] |
"""
Production settings for Estatisticas Facebook project.
- Use WhiteNoise for serving static files
- Use Amazon's S3 for storing uploaded media
- Use mailgun to send emails
- Use Redis for cache
- Use sentry for error logging
"""
import logging
from .base import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env('DJANGO_SECRET_KEY')
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# raven sentry client
# See https://docs.sentry.io/clients/python/integrations/django/
INSTALLED_APPS += ['raven.contrib.django.raven_compat', ]
# Use Whitenoise to serve static files
# See: https://whitenoise.readthedocs.io/
WHITENOISE_MIDDLEWARE = ['whitenoise.middleware.WhiteNoiseMiddleware', ]
MIDDLEWARE = WHITENOISE_MIDDLEWARE + MIDDLEWARE
RAVEN_MIDDLEWARE = ['raven.contrib.django.raven_compat.middleware.SentryResponseErrorIdMiddleware']
MIDDLEWARE = RAVEN_MIDDLEWARE + MIDDLEWARE
# SECURITY CONFIGURATION
# ------------------------------------------------------------------------------
# See https://docs.djangoproject.com/en/dev/ref/middleware/#module-django.middleware.security
# and https://docs.djangoproject.com/en/dev/howto/deployment/checklist/#run-manage-py-check-deploy
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True)
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['danieldourado.com', ])
# END SITE CONFIGURATION
INSTALLED_APPS += ['gunicorn', ]
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
# Uploaded Media Files
# ------------------------
# See: http://django-storages.readthedocs.io/en/latest/index.html
INSTALLED_APPS += ['storages', ]
AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME')
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7  # one week, in seconds
# TODO See: https://github.com/jschneier/django-storages/issues/47
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
control = 'max-age=%d, s-maxage=%d, must-revalidate' % (AWS_EXPIRY, AWS_EXPIRY)
AWS_HEADERS = {
'Cache-Control': bytes(control, encoding='latin-1')
}
# URL that handles the media served from MEDIA_ROOT, used for managing
# stored files.
MEDIA_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
# Static Assets
# ------------------------
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='Estatisticas Facebook <noreply@danieldourado.com>')
EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[Estatisticas Facebook]')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# Anymail with Mailgun
INSTALLED_APPS += ['anymail', ]
ANYMAIL = {
'MAILGUN_API_KEY': env('DJANGO_MAILGUN_API_KEY'),
'MAILGUN_SENDER_DOMAIN': env('MAILGUN_SENDER_DOMAIN')
}
EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend'
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]),
]
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# Use the Heroku-style specification
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
DATABASES['default'] = env.db('DATABASE_URL')
DATABASES['default']['CONN_MAX_AGE'] = env.int('CONN_MAX_AGE', default=60)
# CACHING
# ------------------------------------------------------------------------------
# Heroku URL does not pass the DB number, so we parse it in
REDIS_LOCATION = '{0}/{1}'.format(env('REDIS_URL', default='redis://127.0.0.1:6379'), 0)
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': REDIS_LOCATION,
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
'IGNORE_EXCEPTIONS': True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
# Sentry Configuration
SENTRY_DSN = env('DJANGO_SENTRY_DSN')
SENTRY_CLIENT = env('DJANGO_SENTRY_CLIENT', default='raven.contrib.django.raven_compat.DjangoClient')
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['sentry', ],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
},
'handlers': {
'sentry': {
'level': 'ERROR',
'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
},
'loggers': {
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console', ],
'propagate': False,
},
'raven': {
'level': 'DEBUG',
'handlers': ['console', ],
'propagate': False,
},
'sentry.errors': {
'level': 'DEBUG',
'handlers': ['console', ],
'propagate': False,
},
'django.security.DisallowedHost': {
'level': 'ERROR',
'handlers': ['console', 'sentry', ],
'propagate': False,
},
},
}
SENTRY_CELERY_LOGLEVEL = env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO)
RAVEN_CONFIG = {
'CELERY_LOGLEVEL': env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO),
'DSN': SENTRY_DSN
}
# Custom Admin URL, use {% url 'admin:index' %}
ADMIN_URL = env('DJANGO_ADMIN_URL')
# Your production stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
| [
[
[
245,
252
],
[
7208,
7215
],
[
7297,
7304
]
],
[
[
273,
274
],
[
550,
553
],
[
818,
832
],
[
1068,
1078
],
[
1654,
1657
],
[
1753,
1756
],
[
1934,
1937
],
[
2307,
2310
],
[
2709,
2712
],
[
2765,
2768
],
[
2827,
2830
],
[
3822,
3825
],
[
3964,
3967
],
[
4049,
4052
],
[
4195,
4198
],
[
4255,
4258
],
[
4553,
4562
],
[
4991,
4994
],
[
4968,
4977
],
[
5053,
5056
],
[
5014,
5023
],
[
5216,
5219
],
[
5775,
5778
],
[
5816,
5819
],
[
7173,
7176
],
[
7262,
7265
],
[
7397,
7400
]
],
[
[
537,
547
]
],
[
[
669,
692
]
],
[
[
818,
832
],
[
2399,
2413
]
],
[
[
958,
979
],
[
1044,
1065
]
],
[
[
1031,
1041
],
[
1211,
1221
]
],
[
[
1079,
1095
],
[
1192,
1208
]
],
[
[
1179,
1189
]
],
[
[
1596,
1615
]
],
[
[
1621,
1651
]
],
[
[
1723,
1750
]
],
[
[
1819,
1844
]
],
[
[
1852,
1873
]
],
[
[
1881,
1904
]
],
[
[
1912,
1931
]
],
[
[
1987,
2005
]
],
[
[
2013,
2033
]
],
[
[
2041,
2056
]
],
[
[
2291,
2304
]
],
[
[
2689,
2706
]
],
[
[
2741,
2762
]
],
[
[
2801,
2824
],
[
3495,
3518
]
],
[
[
2865,
2887
]
],
[
[
2895,
2915
]
],
[
[
2995,
3005
],
[
3264,
3274
],
[
3276,
3286
]
],
[
[
3208,
3215
],
[
3331,
3338
]
],
[
[
3288,
3299
]
],
[
[
3450,
3459
]
],
[
[
3519,
3539
]
],
[
[
3630,
3649
]
],
[
[
3801,
3819
],
[
4084,
4102
]
],
[
[
3941,
3961
]
],
[
[
4034,
4046
]
],
[
[
4160,
4167
]
],
[
[
4286,
4299
]
],
[
[
5182,
5196
],
[
5431,
5445
]
],
[
[
5331,
5337
]
],
[
[
5762,
5772
],
[
7323,
7333
]
],
[
[
5800,
5813
]
],
[
[
5902,
5909
]
],
[
[
7148,
7170
]
],
[
[
7222,
7234
]
],
[
[
7385,
7394
]
]
] |
# coding: utf-8
# Copyright (c) Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department
# Distributed under the terms of "New BSD License", see the LICENSE file.
import os
import posixpath
import numpy as np
import pandas
import tables
import warnings
from pyiron_base import GenericParameters, Settings
from pyiron.atomistics.job.potentials import PotentialAbstract, find_potential_file_base
__author__ = "Jan Janssen"
__copyright__ = (
"Copyright 2020, Max-Planck-Institut für Eisenforschung GmbH - "
"Computational Materials Design (CM) Department"
)
__version__ = "1.0"
__maintainer__ = "Jan Janssen"
__email__ = "janssen@mpie.de"
__status__ = "development"
__date__ = "Sep 1, 2017"
s = Settings()
class VaspPotentialAbstract(PotentialAbstract):
"""
Args:
potential_df:
default_df:
selected_atoms:
"""
def __init__(self, potential_df=None, default_df=None, selected_atoms=None):
if potential_df is None:
potential_df = self._get_potential_df(
plugin_name="vasp",
file_name_lst={"potentials_vasp.csv"},
backward_compatibility_name="vasppotentials",
)
super(VaspPotentialAbstract, self).__init__(
potential_df=potential_df,
default_df=default_df,
selected_atoms=selected_atoms,
)
def default(self):
if self._default_df is not None:
return pandas.concat(
[
self._potential_df[
(
self._potential_df["Name"]
== self._default_df.loc[atom].values[0]
)
]
for atom in self._selected_atoms
]
)
return None
def find_default(self, element):
if isinstance(element, set):
element = element
elif isinstance(element, list):
element = set(element)
elif isinstance(element, str):
element = set([element])
else:
raise TypeError("Only, str, list and set supported!")
element_lst = list(element)
if self._default_df is not None:
merged_lst = list(set(self._selected_atoms + element_lst))
return pandas.concat(
[
self._potential_df[
(
self._potential_df["Name"]
== self._default_df.loc[atom].values[0]
)
]
for atom in merged_lst
]
)
return None
def find(self, element):
if isinstance(element, set):
element = element
elif isinstance(element, list):
element = set(element)
elif isinstance(element, str):
element = set([element])
else:
raise TypeError("Only, str, list and set supported!")
element_lst = list(element)
merged_lst = list(set(self._selected_atoms + element_lst))
return pandas.concat(
[super(VaspPotentialAbstract, self).find({atom}) for atom in merged_lst]
)
def list(self):
if len(self._selected_atoms) != 0:
return pandas.concat(
[
super(VaspPotentialAbstract, self).find({atom})
for atom in self._selected_atoms
]
)
else:
return pandas.DataFrame({})
def list_potential_names(self):
df = self.list()
if len(df) != 0:
return list(self.list()["Name"])
else:
return []
@staticmethod
def _return_potential_file(file_name):
for resource_path in s.resource_paths:
resource_path_potcar = os.path.join(
resource_path, "vasp", "potentials", file_name
)
if os.path.exists(resource_path_potcar):
return resource_path_potcar
return None
def __dir__(self):
return [val.replace("-", "_") for val in self.list_potential_names()]
def __getitem__(self, item):
item_replace = item.replace("_gga_pbe", "-gga-pbe").replace("_lda", "-lda")
if item_replace in self.list_potential_names():
df = self.list()
return self._return_potential_file(
file_name=list(df[df["Name"] == item_replace]["Filename"])[0][0]
)
selected_atoms = self._selected_atoms + [item]
return VaspPotentialAbstract(
potential_df=self._potential_df,
default_df=self._default_df,
selected_atoms=selected_atoms,
)
class VaspPotentialFile(VaspPotentialAbstract):
"""
    The VaspPotentialFile class is derived from the VaspPotentialAbstract class, but instead of loading the
    potentials from a list, the potentials are loaded from a file.
Args:
xc (str): Exchange correlation functional ['PBE', 'LDA']
"""
def __init__(self, xc=None, selected_atoms=None):
potential_df = self._get_potential_df(
plugin_name="vasp",
file_name_lst={"potentials_vasp.csv"},
backward_compatibility_name="vasppotentials",
)
if xc == "PBE":
default_df = self._get_potential_default_df(
plugin_name="vasp",
file_name_lst={"potentials_vasp_pbe_default.csv"},
backward_compatibility_name="defaultvasppbe",
)
potential_df = potential_df[(potential_df["Model"] == "gga-pbe")]
elif xc == "GGA":
default_df = self._get_potential_default_df(
plugin_name="vasp",
file_name_lst={"potentials_vasp_pbe_default.csv"},
backward_compatibility_name="defaultvasppbe",
)
potential_df = potential_df[(potential_df["Model"] == "gga-pbe")]
elif xc == "LDA":
default_df = self._get_potential_default_df(
plugin_name="vasp",
file_name_lst={"potentials_vasp_lda_default.csv"},
backward_compatibility_name="defaultvasplda",
)
potential_df = potential_df[(potential_df["Model"] == "lda")]
else:
raise ValueError(
'The exchange correlation functional has to be set and it can either be "LDA" or "PBE"'
)
super(VaspPotentialFile, self).__init__(
potential_df=potential_df,
default_df=default_df,
selected_atoms=selected_atoms,
)
def add_new_element(self, parent_element, new_element):
"""
Adding a new user defined element with a different POTCAR file. It is assumed that the file exists
Args:
parent_element (str): Parent element
        new_element (str): Name of the new element (the name of the folder where the new POTCAR file exists)
"""
ds = self.find_default(element=parent_element)
ds["Species"].values[0][0] = new_element
path_list = ds["Filename"].values[0][0].split("/")
path_list[-2] = new_element
name_list = ds["Name"].values[0].split("-")
name_list[0] = new_element
ds["Name"].values[0] = "-".join(name_list)
ds["Filename"].values[0][0] = "/".join(path_list)
self._potential_df = self._potential_df.append(ds)
if new_element not in self._default_df.index.values:
ds = pandas.Series()
ds.name = new_element
ds["Name"] = "-".join(name_list)
self._default_df = self._default_df.append(ds)
else:
self._default_df.loc[new_element] = "-".join(name_list)
class VaspPotential(object):
"""
    The VaspPotential class bundles the VASP potential tables for the supported
    exchange correlation functionals (PBE and LDA), each loaded from file.
    Args:
        selected_atoms: optional subset of atoms the potential tables are restricted to
"""
def __init__(self, selected_atoms=None):
self.pbe = VaspPotentialFile(xc="PBE", selected_atoms=selected_atoms)
self.lda = VaspPotentialFile(xc="LDA", selected_atoms=selected_atoms)
class VaspPotentialSetter(object):
def __init__(self, element_lst):
super(VaspPotentialSetter, self).__setattr__("_element_lst", element_lst)
super(VaspPotentialSetter, self).__setattr__(
"_potential_dict", {el: None for el in element_lst}
)
def __getattr__(self, item):
if item in self._element_lst:
return item
else:
raise AttributeError
def __setitem__(self, key, value):
self.__setattr__(key=key, value=value)
def __setattr__(self, key, value):
if key in self._element_lst:
self._potential_dict[key] = value
else:
raise AttributeError
def to_dict(self):
return self._potential_dict
def __repr__(self):
return self._potential_dict.__repr__()
def find_potential_file(path):
return find_potential_file_base(
path=path,
resource_path_lst=s.resource_paths,
rel_path=os.path.join("vasp", "potentials")
)
def get_enmax_among_species(symbol_lst, return_list=False, xc="PBE"):
"""
DEPRECATED: Please use `get_enmax_among_potentials`.
Given a list of species symbols, finds the largest applicable encut.
Args:
symbol_lst (list): The list of species symbols.
        return_list (bool): Whether to return the list of all ENMAX values (in the same order as `symbol_lst`)
            along with the largest value. (Default is False.)
xc ("GGA"/"PBE"/"LDA"): The exchange correlation functional for which the POTCARs were generated. (Default is "PBE".)
Returns:
(float): The largest ENMAX among the POTCAR files for all the species.
[optional](list): The ENMAX value corresponding to each species.
"""
warnings.warn(("get_enmax_among_species is deprecated as of v0.3.0. Please use get_enmax_among_potentials and note "
+ "the adjustment to the signature (*args instead of list)"), DeprecationWarning)
return get_enmax_among_potentials(*symbol_lst, return_list=return_list, xc=xc)
def get_enmax_among_potentials(*names, return_list=False, xc="PBE"):
"""
Given potential names without XC information or elemental symbols, look over all the corresponding POTCAR files and
find the largest ENMAX value.
e.g. `get_enmax_among_potentials('Mg', 'Al_GW', 'Ca_pv', 'Ca_sv', xc='LDA')`
Args:
*names (str): Names of potentials or elemental symbols
return_list (bool): Whether to return the list of all ENMAX values (in the same order as `names` as a second
return value after providing the largest value). (Default is False.)
xc ("GGA"/"PBE"/"LDA"): The exchange correlation functional for which the POTCARs were generated.
(Default is "PBE".)
Returns:
(float): The largest ENMAX among the POTCAR files for all the requested names.
[optional](list): The ENMAX value corresponding to each species.
"""
def _get_just_element_from_name(name):
return name.split('_')[0]
def _get_index_of_exact_match(name, potential_names):
try:
return np.argwhere([name == strip_xc_from_potential_name(pn) for pn in potential_names])[0, 0]
except IndexError:
raise ValueError("Couldn't find {} among potential names for {}".format(name,
_get_just_element_from_name(name)))
def _get_potcar_filename(name, exch_corr):
potcar_table = VaspPotentialFile(xc=exch_corr).find(_get_just_element_from_name(name))
return potcar_table['Filename'].values[
_get_index_of_exact_match(name, potcar_table['Name'].values)
][0]
enmax_lst = []
    for n in names:
        with open(find_potential_file(path=_get_potcar_filename(n, xc))) as pf:
            for i, line in enumerate(pf):
                if i == 14:  # ENMAX sits on this fixed header line in a standard POTCAR
                    encut_str = line.split()[2][:-1]  # strip the trailing ';'
                    enmax_lst.append(float(encut_str))
                    break
if return_list:
return max(enmax_lst), enmax_lst
else:
return max(enmax_lst)
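# For example (assumes the corresponding POTCAR resources are configured):
# enmax = get_enmax_among_potentials("Mg", "Al_GW", xc="PBE")
# enmax, per_name = get_enmax_among_potentials("Mg", "Al_GW", xc="PBE",
#                                              return_list=True)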
def strip_xc_from_potential_name(name):
return name.split('-')[0]
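# For example: strip_xc_from_potential_name("Fe-gga-pbe") -> "Fe"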
class Potcar(GenericParameters):
pot_path_dict = {"GGA": "paw-gga-pbe", "PBE": "paw-gga-pbe", "LDA": "paw-lda"}
def __init__(self, input_file_name=None, table_name="potcar"):
GenericParameters.__init__(
self,
input_file_name=input_file_name,
table_name=table_name,
val_only=False,
comment_char="#",
)
self._structure = None
self.electrons_per_atom_lst = list()
self.max_cutoff_lst = list()
self.el_path_lst = list()
self.el_path_dict = dict()
self.modified_elements = dict()
def potcar_set_structure(self, structure, modified_elements):
self._structure = structure
self._set_default_path_dict()
self._set_potential_paths()
self.modified_elements = modified_elements
def modify(self, **modify):
if "xc" in modify:
xc_type = modify["xc"]
self._set_default_path_dict()
if xc_type not in self.pot_path_dict:
raise ValueError("xc type not implemented: " + xc_type)
GenericParameters.modify(self, **modify)
if self._structure is not None:
self._set_potential_paths()
def _set_default_path_dict(self):
if self._structure is None:
return
vasp_potentials = VaspPotentialFile(xc=self.get("xc"))
for i, el_obj in enumerate(self._structure.get_species_objects()):
if isinstance(el_obj.Parent, str):
el = el_obj.Parent
else:
el = el_obj.Abbreviation
if isinstance(el_obj.tags, dict):
if "pseudo_potcar_file" in el_obj.tags.keys():
new_element = el_obj.tags["pseudo_potcar_file"]
vasp_potentials.add_new_element(
parent_element=el, new_element=new_element
)
key = vasp_potentials.find_default(el).Species.values[0][0]
val = vasp_potentials.find_default(el).Name.values[0]
self[key] = val
def _set_potential_paths(self):
element_list = (
self._structure.get_species_symbols()
) # .ElementList.getSpecies()
object_list = self._structure.get_species_objects()
s.logger.debug("element list: {0}".format(element_list))
self.el_path_lst = list()
        try:
            xc = self.get("xc")
        except tables.exceptions.NoSuchNodeError:
            # NOTE: both branches perform the same lookup; the except clause
            # simply retries once if the HDF node was not available yet
            xc = self.get("xc")
s.logger.debug("XC: {0}".format(xc))
vasp_potentials = VaspPotentialFile(xc=xc)
for i, el_obj in enumerate(object_list):
if isinstance(el_obj.Parent, str):
el = el_obj.Parent
else:
el = el_obj.Abbreviation
if (
isinstance(el_obj.tags, dict)
and "pseudo_potcar_file" in el_obj.tags.keys()
):
new_element = el_obj.tags["pseudo_potcar_file"]
vasp_potentials.add_new_element(
parent_element=el, new_element=new_element
)
el_path = find_potential_file(
path=vasp_potentials.find_default(new_element)["Filename"].values[
0
][0]
)
if not (os.path.isfile(el_path)):
raise ValueError("such a file does not exist in the pp directory")
elif el in self.modified_elements.keys():
new_element = self.modified_elements[el]
if os.path.isabs(new_element):
el_path = new_element
else:
vasp_potentials.add_new_element(
parent_element=el, new_element=new_element
)
el_path = find_potential_file(
path=vasp_potentials.find_default(new_element)["Filename"].values[
0
][0]
)
else:
el_path = find_potential_file(
path=vasp_potentials.find_default(el)["Filename"].values[0][0]
)
if not (os.path.isfile(el_path)):
raise AssertionError()
pot_name = "pot_" + str(i)
if pot_name in self._dataset["Parameter"]:
try:
ind = self._dataset["Parameter"].index(pot_name)
except (ValueError, IndexError):
indices = np.core.defchararray.find(
self._dataset["Parameter"], pot_name
)
ind = np.where(indices == 0)[0][0]
self._dataset["Value"][ind] = el_path
self._dataset["Comment"][ind] = ""
else:
self._dataset["Parameter"].append("pot_" + str(i))
self._dataset["Value"].append(el_path)
self._dataset["Comment"].append("")
self.el_path_lst.append(el_path)
def write_file(self, file_name, cwd=None):
"""
Args:
file_name:
cwd:
Returns:
"""
self.electrons_per_atom_lst = list()
self.max_cutoff_lst = list()
self._set_potential_paths()
if cwd is not None:
file_name = posixpath.join(cwd, file_name)
f = open(file_name, "w")
for el_file in self.el_path_lst:
with open(el_file) as pot_file:
for i, line in enumerate(pot_file):
f.write(line)
if i == 1:
self.electrons_per_atom_lst.append(int(float(line)))
elif i == 14:
mystr = line.split()[2][:-1]
self.max_cutoff_lst.append(float(mystr))
f.close()
def load_default(self):
file_content = """\
xc GGA # LDA, GGA
"""
self.load_string(file_content) | [
[
[
207,
209
],
[
3925,
3927
],
[
4031,
4033
],
[
9296,
9298
],
[
15938,
15940
],
[
16181,
16183
],
[
16825,
16827
]
],
[
[
217,
226
],
[
17974,
17983
]
],
[
[
235,
246
],
[
17154,
17156
],
[
17290,
17292
],
[
11469,
11471
]
],
[
[
254,
260
],
[
1491,
1497
],
[
2369,
2375
],
[
3180,
3186
],
[
3373,
3379
],
[
3592,
3598
],
[
7624,
7630
]
],
[
[
268,
274
],
[
15023,
15029
]
],
[
[
282,
290
],
[
10093,
10101
]
],
[
[
315,
332
],
[
12584,
12601
],
[
12763,
12780
],
[
13674,
13691
]
],
[
[
334,
342
],
[
740,
748
]
],
[
[
388,
405
],
[
781,
798
]
],
[
[
407,
431
],
[
9190,
9214
]
],
[
[
433,
443
]
],
[
[
460,
473
]
],
[
[
602,
613
]
],
[
[
622,
636
]
],
[
[
653,
662
]
],
[
[
683,
693
]
],
[
[
710,
718
]
],
[
[
736,
737
],
[
3872,
3873
],
[
9261,
9262
],
[
14872,
14873
],
[
15098,
15099
]
],
[
[
759,
780
],
[
4839,
4860
],
[
1241,
1262
],
[
3214,
3235
],
[
3432,
3453
],
[
4651,
4672
]
],
[
[
4821,
4838
],
[
6561,
6578
],
[
8193,
8210
],
[
8271,
8288
],
[
13915,
13932
],
[
15161,
15178
],
[
11865,
11882
]
],
[
[
7868,
7881
]
],
[
[
8338,
8357
],
[
8418,
8437
],
[
8500,
8519
]
],
[
[
9152,
9171
],
[
12129,
12148
],
[
15737,
15756
],
[
16445,
16464
],
[
16682,
16701
]
],
[
[
9343,
9366
]
],
[
[
10400,
10426
],
[
10322,
10348
]
],
[
[
12503,
12531
],
[
11490,
11518
]
],
[
[
12577,
12583
]
]
] |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1Event(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'action': 'str',
'api_version': 'str',
'deprecated_count': 'int',
'deprecated_first_timestamp': 'datetime',
'deprecated_last_timestamp': 'datetime',
'deprecated_source': 'V1EventSource',
'event_time': 'datetime',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'note': 'str',
'reason': 'str',
'regarding': 'V1ObjectReference',
'related': 'V1ObjectReference',
'reporting_controller': 'str',
'reporting_instance': 'str',
'series': 'V1beta1EventSeries',
'type': 'str'
}
attribute_map = {
'action': 'action',
'api_version': 'apiVersion',
'deprecated_count': 'deprecatedCount',
'deprecated_first_timestamp': 'deprecatedFirstTimestamp',
'deprecated_last_timestamp': 'deprecatedLastTimestamp',
'deprecated_source': 'deprecatedSource',
'event_time': 'eventTime',
'kind': 'kind',
'metadata': 'metadata',
'note': 'note',
'reason': 'reason',
'regarding': 'regarding',
'related': 'related',
'reporting_controller': 'reportingController',
'reporting_instance': 'reportingInstance',
'series': 'series',
'type': 'type'
}
def __init__(self, action=None, api_version=None, deprecated_count=None, deprecated_first_timestamp=None, deprecated_last_timestamp=None, deprecated_source=None, event_time=None, kind=None, metadata=None, note=None, reason=None, regarding=None, related=None, reporting_controller=None, reporting_instance=None, series=None, type=None):
"""
V1beta1Event - a model defined in Swagger
"""
self._action = None
self._api_version = None
self._deprecated_count = None
self._deprecated_first_timestamp = None
self._deprecated_last_timestamp = None
self._deprecated_source = None
self._event_time = None
self._kind = None
self._metadata = None
self._note = None
self._reason = None
self._regarding = None
self._related = None
self._reporting_controller = None
self._reporting_instance = None
self._series = None
self._type = None
self.discriminator = None
if action is not None:
self.action = action
if api_version is not None:
self.api_version = api_version
if deprecated_count is not None:
self.deprecated_count = deprecated_count
if deprecated_first_timestamp is not None:
self.deprecated_first_timestamp = deprecated_first_timestamp
if deprecated_last_timestamp is not None:
self.deprecated_last_timestamp = deprecated_last_timestamp
if deprecated_source is not None:
self.deprecated_source = deprecated_source
self.event_time = event_time
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if note is not None:
self.note = note
if reason is not None:
self.reason = reason
if regarding is not None:
self.regarding = regarding
if related is not None:
self.related = related
if reporting_controller is not None:
self.reporting_controller = reporting_controller
if reporting_instance is not None:
self.reporting_instance = reporting_instance
if series is not None:
self.series = series
if type is not None:
self.type = type
@property
def action(self):
"""
Gets the action of this V1beta1Event.
What action was taken/failed regarding to the regarding object.
:return: The action of this V1beta1Event.
:rtype: str
"""
return self._action
@action.setter
def action(self, action):
"""
Sets the action of this V1beta1Event.
What action was taken/failed regarding to the regarding object.
:param action: The action of this V1beta1Event.
:type: str
"""
self._action = action
@property
def api_version(self):
"""
Gets the api_version of this V1beta1Event.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:return: The api_version of this V1beta1Event.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1beta1Event.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:param api_version: The api_version of this V1beta1Event.
:type: str
"""
self._api_version = api_version
@property
def deprecated_count(self):
"""
Gets the deprecated_count of this V1beta1Event.
Deprecated field assuring backward compatibility with core.v1 Event type
:return: The deprecated_count of this V1beta1Event.
:rtype: int
"""
return self._deprecated_count
@deprecated_count.setter
def deprecated_count(self, deprecated_count):
"""
Sets the deprecated_count of this V1beta1Event.
Deprecated field assuring backward compatibility with core.v1 Event type
:param deprecated_count: The deprecated_count of this V1beta1Event.
:type: int
"""
self._deprecated_count = deprecated_count
@property
def deprecated_first_timestamp(self):
"""
Gets the deprecated_first_timestamp of this V1beta1Event.
Deprecated field assuring backward compatibility with core.v1 Event type
:return: The deprecated_first_timestamp of this V1beta1Event.
:rtype: datetime
"""
return self._deprecated_first_timestamp
@deprecated_first_timestamp.setter
def deprecated_first_timestamp(self, deprecated_first_timestamp):
"""
Sets the deprecated_first_timestamp of this V1beta1Event.
Deprecated field assuring backward compatibility with core.v1 Event type
:param deprecated_first_timestamp: The deprecated_first_timestamp of this V1beta1Event.
:type: datetime
"""
self._deprecated_first_timestamp = deprecated_first_timestamp
@property
def deprecated_last_timestamp(self):
"""
Gets the deprecated_last_timestamp of this V1beta1Event.
Deprecated field assuring backward compatibility with core.v1 Event type
:return: The deprecated_last_timestamp of this V1beta1Event.
:rtype: datetime
"""
return self._deprecated_last_timestamp
@deprecated_last_timestamp.setter
def deprecated_last_timestamp(self, deprecated_last_timestamp):
"""
Sets the deprecated_last_timestamp of this V1beta1Event.
Deprecated field assuring backward compatibility with core.v1 Event type
:param deprecated_last_timestamp: The deprecated_last_timestamp of this V1beta1Event.
:type: datetime
"""
self._deprecated_last_timestamp = deprecated_last_timestamp
@property
def deprecated_source(self):
"""
Gets the deprecated_source of this V1beta1Event.
Deprecated field assuring backward compatibility with core.v1 Event type
:return: The deprecated_source of this V1beta1Event.
:rtype: V1EventSource
"""
return self._deprecated_source
@deprecated_source.setter
def deprecated_source(self, deprecated_source):
"""
Sets the deprecated_source of this V1beta1Event.
Deprecated field assuring backward compatibility with core.v1 Event type
:param deprecated_source: The deprecated_source of this V1beta1Event.
:type: V1EventSource
"""
self._deprecated_source = deprecated_source
@property
def event_time(self):
"""
Gets the event_time of this V1beta1Event.
Required. Time when this Event was first observed.
:return: The event_time of this V1beta1Event.
:rtype: datetime
"""
return self._event_time
@event_time.setter
def event_time(self, event_time):
"""
Sets the event_time of this V1beta1Event.
Required. Time when this Event was first observed.
:param event_time: The event_time of this V1beta1Event.
:type: datetime
"""
if event_time is None:
raise ValueError("Invalid value for `event_time`, must not be `None`")
self._event_time = event_time
@property
def kind(self):
"""
Gets the kind of this V1beta1Event.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The kind of this V1beta1Event.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1beta1Event.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1beta1Event.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1beta1Event.
:return: The metadata of this V1beta1Event.
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1beta1Event.
:param metadata: The metadata of this V1beta1Event.
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def note(self):
"""
Gets the note of this V1beta1Event.
Optional. A human-readable description of the status of this operation. Maximal length of the note is 1kB, but libraries should be prepared to handle values up to 64kB.
:return: The note of this V1beta1Event.
:rtype: str
"""
return self._note
@note.setter
def note(self, note):
"""
Sets the note of this V1beta1Event.
Optional. A human-readable description of the status of this operation. Maximal length of the note is 1kB, but libraries should be prepared to handle values up to 64kB.
:param note: The note of this V1beta1Event.
:type: str
"""
self._note = note
@property
def reason(self):
"""
Gets the reason of this V1beta1Event.
Why the action was taken.
:return: The reason of this V1beta1Event.
:rtype: str
"""
return self._reason
@reason.setter
def reason(self, reason):
"""
Sets the reason of this V1beta1Event.
Why the action was taken.
:param reason: The reason of this V1beta1Event.
:type: str
"""
self._reason = reason
@property
def regarding(self):
"""
Gets the regarding of this V1beta1Event.
The object this Event is about. In most cases it's an Object reporting controller implements. E.g. ReplicaSetController implements ReplicaSets and this event is emitted because it acts on some changes in a ReplicaSet object.
:return: The regarding of this V1beta1Event.
:rtype: V1ObjectReference
"""
return self._regarding
@regarding.setter
def regarding(self, regarding):
"""
Sets the regarding of this V1beta1Event.
The object this Event is about. In most cases it's an Object reporting controller implements. E.g. ReplicaSetController implements ReplicaSets and this event is emitted because it acts on some changes in a ReplicaSet object.
:param regarding: The regarding of this V1beta1Event.
:type: V1ObjectReference
"""
self._regarding = regarding
@property
def related(self):
"""
Gets the related of this V1beta1Event.
Optional secondary object for more complex actions. E.g. when regarding object triggers a creation or deletion of related object.
:return: The related of this V1beta1Event.
:rtype: V1ObjectReference
"""
return self._related
@related.setter
def related(self, related):
"""
Sets the related of this V1beta1Event.
Optional secondary object for more complex actions. E.g. when regarding object triggers a creation or deletion of related object.
:param related: The related of this V1beta1Event.
:type: V1ObjectReference
"""
self._related = related
@property
def reporting_controller(self):
"""
Gets the reporting_controller of this V1beta1Event.
Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.
:return: The reporting_controller of this V1beta1Event.
:rtype: str
"""
return self._reporting_controller
@reporting_controller.setter
def reporting_controller(self, reporting_controller):
"""
Sets the reporting_controller of this V1beta1Event.
Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.
:param reporting_controller: The reporting_controller of this V1beta1Event.
:type: str
"""
self._reporting_controller = reporting_controller
@property
def reporting_instance(self):
"""
Gets the reporting_instance of this V1beta1Event.
ID of the controller instance, e.g. `kubelet-xyzf`.
:return: The reporting_instance of this V1beta1Event.
:rtype: str
"""
return self._reporting_instance
@reporting_instance.setter
def reporting_instance(self, reporting_instance):
"""
Sets the reporting_instance of this V1beta1Event.
ID of the controller instance, e.g. `kubelet-xyzf`.
:param reporting_instance: The reporting_instance of this V1beta1Event.
:type: str
"""
self._reporting_instance = reporting_instance
@property
def series(self):
"""
Gets the series of this V1beta1Event.
Data about the Event series this event represents or nil if it's a singleton Event.
:return: The series of this V1beta1Event.
:rtype: V1beta1EventSeries
"""
return self._series
@series.setter
def series(self, series):
"""
Sets the series of this V1beta1Event.
Data about the Event series this event represents or nil if it's a singleton Event.
:param series: The series of this V1beta1Event.
:type: V1beta1EventSeries
"""
self._series = series
@property
def type(self):
"""
Gets the type of this V1beta1Event.
Type of this event (Normal, Warning), new types could be added in the future.
:return: The type of this V1beta1Event.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this V1beta1Event.
Type of this event (Normal, Warning), new types could be added in the future.
:param type: The type of this V1beta1Event.
:type: str
"""
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1Event):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
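# --- usage sketch (illustrative, not part of the generated client) ---
# Assuming the generated __init__ accepts each attribute as a keyword argument
# (the usual swagger-codegen convention):
#
#   import datetime
#   ev = V1beta1Event(event_time=datetime.datetime.utcnow(), reason="Scheduled")
#   ev.note = "Pod assigned to node"  # plain property assignment
#   print(ev.to_dict())               # nested models are dict-ified recursively
#   ev.event_time = None              # would raise ValueError: field is required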
| [
[
[
277,
284
],
[
17959,
17966
]
],
[
[
301,
310
],
[
17141,
17150
]
],
[
[
318,
320
]
],
[
[
329,
341
],
[
18228,
18240
]
]
] |
#task 1
nyaam = float(input('enter a length in cm: '))
if nyaam < 0:
    print('entry is invalid')
else:
    res = nyaam / 2.54
    print(res, 'inch')
#task 2
whoosh = int(input('how many credits have you taken? '))
if 0 < whoosh < 24:
    print('congrats, you a freshman!')
elif 23 < whoosh < 54:
    print('congrats, you a sophomore!')
elif 53 < whoosh < 84:
    print('congrats, you a junior!')
elif whoosh > 83:
    print('congrats, you a senior!')
else:
    print('you haven\'t any credits, fool')
#task 3
from random import randrange
jeffry = randrange(10)  # yields 0..9 inclusive
goat = int(input('guess the number between 0 and 9: '))
if goat == jeffry:
    print('you\'re right!')
else:
    print('that\'s not it, pal')
    print(jeffry)
| [
[
[
9,
14
],
[
60,
65
],
[
118,
123
]
],
[
[
112,
115
],
[
143,
146
]
],
[
[
167,
173
],
[
229,
235
],
[
244,
250
],
[
302,
308
],
[
318,
324
],
[
377,
383
],
[
393,
399
],
[
449,
455
],
[
505,
511
]
],
[
[
592,
601
],
[
612,
621
]
],
[
[
603,
609
],
[
696,
702
],
[
780,
786
]
],
[
[
626,
630
],
[
688,
692
]
]
] |
import torch
import torch.nn as nn
import torch.nn.functional as F
from mp.models.segmentation.unet_fepegar import UNet2D
### UNet Wrapper ###
class UNet2D_dis(UNet2D):
r"""Wrapper for UNet2D to access encoder and decoder seperately.
"""
def __init__(self, *args, **kwargs):
super(UNet2D_dis, self).__init__(*args, **kwargs)
def forward_enc(self, x):
skip_connections, encoding = self.encoder(x)
encoding = self.bottom_block(encoding)
return skip_connections, encoding
def forward_dec(self, skip_connections, encoding):
x = self.decoder(skip_connections, encoding)
if self.monte_carlo_layer is not None:
x = self.monte_carlo_layer(x)
return self.classifier(x)
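# --- usage sketch (illustrative; UNet2D's constructor arguments are an
# assumption, not taken from mp.models.segmentation.unet_fepegar) ---
#
#   unet = UNet2D_dis(in_channels=1, out_classes=2)
#   skips, enc = unet.forward_enc(x)        # x: (B, 1, H, W)
#   logits = unet.forward_dec(skips, enc)   # equivalent to a full forward pass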
### MODULES ###
class EncoderStyle(nn.Module):
r"""Style Encoder (VAE).
"""
def __init__(self, in_channels):
super(EncoderStyle, self).__init__()
layers = []
layers += [ConvBlock(in_channels=in_channels, out_channels=256)]
layers += [ConvPoolBlock(in_channels=256, out_channels=64, pooling=False)]
layers += [ConvPoolBlock(in_channels=64, out_channels=128, pooling=True)]
layers += [ConvPoolBlock(in_channels=128, out_channels=128, pooling=False)]
layers += [ConvPoolBlock(in_channels=128, out_channels=192, pooling=True)]
layers += [ConvPoolBlock(in_channels=192, out_channels=192, pooling=False)]
layers += [ConvPoolBlock(in_channels=192, out_channels=256, pooling=True)]
global_pool = [nn.LeakyReLU(), nn.AdaptiveMaxPool2d(output_size=(3,3))]
self.global_pool = nn.Sequential(*global_pool)
self.layers = nn.Sequential(*layers)
self.dense_mu = nn.Linear(in_features=3*3*256, out_features=1)
self.dense_var = nn.Linear(in_features=3*3*256, out_features=1)
def forward(self, x):
x = self.layers(x)
x = self.global_pool(x)
mu = self.dense_mu(x.view(x.shape[0], -1))
log_var = self.dense_var(x.view(x.shape[0], -1))
return [mu, log_var]
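# A style code is typically sampled from the returned Gaussian parameters via
# the standard VAE reparameterization trick (a sketch; the sampling is done by
# the caller, not by this module):
#
#   style_enc = EncoderStyle(in_channels=1)
#   mu, log_var = style_enc(x)
#   z = mu + torch.exp(0.5 * log_var) * torch.randn_like(log_var)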
class LatentScaler(nn.Module):
r"""Scales samples from style encoding to be injected into the generator.
"""
def __init__(self, in_features):
super(LatentScaler, self).__init__()
layers = [nn.Linear(in_features=in_features, out_features=500), nn.LeakyReLU()]
layers += [nn.Linear(in_features=500, out_features=1024), nn.LeakyReLU()]
for _ in range(0, 2):
layers += [nn.Linear(in_features=1024, out_features=1024), nn.LeakyReLU()]
layers += [nn.Linear(in_features=1024, out_features=2560), nn.Tanh()]
self.layers = nn.Sequential(*layers)
def forward(self, x):
        x = self.layers(x).reshape(x.shape[0], 10, -1)  # 10 scale vectors of 256 entries each (2 per ResBlockBCIN in the Generator)
return x
class Generator(nn.Module):
r"""Generator using content encoding, scaled style encoding (see LatentScaler) and domain_code to generate images.
"""
def __init__(self, in_channels, out_channels, domain_code_size):
super(Generator, self).__init__()
layers_BCIN = [ResBlockBCIN(in_channels=in_channels, out_channels=in_channels, layer_id=0, stride=1, padding=1, domain_code_size=domain_code_size)]
for i in range(0,4):
layers_BCIN += [ResBlockBCIN(in_channels=in_channels, out_channels=in_channels, layer_id=i+1, stride=1, padding=1, domain_code_size=domain_code_size)]
layers = [nn.ConvTranspose2d(in_channels=in_channels, out_channels=in_channels, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()]
layers += [nn.ConvTranspose2d(in_channels=in_channels, out_channels=128, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()]
layers += [nn.ConvTranspose2d(in_channels=128, out_channels=128, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()]
layers += [nn.ConvTranspose2d(in_channels=128, out_channels=64, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU()]
layers += [nn.ConvTranspose2d(in_channels=64, out_channels=out_channels, kernel_size=7, stride=1, padding=3), nn.Sigmoid()]
self.layers_BCIN = MultiInSequential(*layers_BCIN)
self.layers = nn.Sequential(*layers)
def forward(self, content, latent_scale, domain_code):
content, latent_scale, domain_code = self.layers_BCIN(content, latent_scale, domain_code)
x = self.layers(content)
return x
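# --- shape sketch (illustrative values) ---
#   content:      (B, 256, H, W)          content encoding
#   latent_scale: (B, 10, 256)            from LatentScaler, 2 scales per ResBlockBCIN
#   domain_code:  (B, domain_code_size)   one-hot target domain
#
#   g = Generator(in_channels=256, out_channels=1, domain_code_size=10)
#   fake = g(content, latent_scale, domain_code)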
class DiscriminatorDomain(nn.Module):
r"""Domain Discriminator.
"""
def __init__(self, in_channels, domain_code_size, max_channels=512, kernel_size=4, stride=2):
super(DiscriminatorDomain, self).__init__()
layers = [ConvBlockBCIN(in_channels=in_channels, out_channels=64, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)]
layers += [ConvBlockBCIN(in_channels=64, out_channels=128, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)]
layers += [ConvBlockBCIN(in_channels=128, out_channels=max_channels//2, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)]
layers += [ConvBlockBCIN(in_channels=max_channels//2, out_channels=max_channels, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size)]
layers += [ConvBlockBCIN(in_channels=max_channels, out_channels=1, kernel_size=kernel_size, stride=stride, domain_code_size=domain_code_size, normalization='None')]
self.layers = MultiInSequential(*layers)
self.linear = nn.Linear(in_features=7**2, out_features=1)
self.activation = nn.Sigmoid()
def forward(self, x, domain_code):
x, domain_code = self.layers(x, domain_code)
x = x.view(x.shape[0],-1)
x = self.linear(x)
return x
class DiscriminatorContent(nn.Module):
r"""Unet-style Content Discriminator.
"""
def __init__(self, in_channels, domain_code_size, max_channels=512, kernel_size=3, stride=2):
super(DiscriminatorContent, self).__init__()
self.in_channels = 16
self.in_channels_max = 128
self.out_channels = 32
self.out_channels_max = 256
padding = 1
self.conv_0 = nn.Conv2d(in_channels=self.in_channels, out_channels=self.in_channels*2**1, kernel_size=kernel_size, stride=stride, padding=padding)
self.norm_0 = nn.BatchNorm2d(self.in_channels*2**1)
self.activation_0 = nn.ReLU()
self.conv_1 = nn.Conv2d(in_channels=self.in_channels*2**1, out_channels=self.in_channels*2**2, kernel_size=kernel_size, stride=stride, padding=padding)
self.norm_1 = nn.BatchNorm2d(self.in_channels*2**2)
self.activation_1 = nn.ReLU()
self.conv_2 = nn.Conv2d(in_channels=self.in_channels*2**2, out_channels=self.in_channels*2**3, kernel_size=kernel_size, stride=stride, padding=padding)
self.norm_2 = nn.BatchNorm2d(self.in_channels*2**3)
self.activation_2 = nn.ReLU()
self.conv_3 = nn.Conv2d(in_channels=self.in_channels*2**3, out_channels=self.in_channels*2**4, kernel_size=kernel_size, stride=stride, padding=padding)
self.norm_3 = nn.BatchNorm2d(self.in_channels*2**4)
self.activation_3 = nn.ReLU()
self.conv_4 = nn.Conv2d(in_channels=self.in_channels*2**4, out_channels=1, kernel_size=kernel_size, stride=stride, padding=padding)
self.norm_4 = nn.BatchNorm2d(1)
self.activation_4 = nn.ReLU()
self.dense = nn.Linear(in_features = 8**2, out_features=domain_code_size)
self.softmax = nn.Softmax(dim=1)
def forward(self, skip_connections, content_x):
out = self.conv_0(skip_connections[0])
out = self.norm_0(out)
out = self.activation_0(out)
out = self.conv_1(skip_connections[1] + out)
out = self.norm_1(out)
out = self.activation_1(out)
out = self.conv_2(skip_connections[2] + out)
out = self.norm_2(out)
out = self.activation_2(out)
out = self.conv_3(skip_connections[3] + out)
out = self.norm_3(out)
out = self.activation_3(out)
out = self.conv_4(content_x + out)
out = self.norm_4(out)
out = self.activation_4(out)
out = self.dense(out.reshape(content_x.shape[0], -1))
out = self.softmax(out)
return out
def center_crop(self, skip_connection, x):
skip_shape = torch.tensor(skip_connection.shape)
x_shape = torch.tensor(x.shape)
crop = skip_shape[2:] - x_shape[2:]
half_crop = crop // 2
# If skip_connection is 10, 20, 30 and x is (6, 14, 12)
# Then pad will be (-2, -2, -3, -3, -9, -9)
pad = -torch.stack((half_crop, half_crop)).t().flatten()
skip_connection = F.pad(skip_connection, pad.tolist())
return skip_connection
### BUILDING BLOCKS ###
class ConvBlock(nn.Module):
r"""Convolutional Block with normalization and activation.
"""
def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.LeakyReLU, normalization='Instance'):
super(ConvBlock, self).__init__()
self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding)
self.normalization = normalization
if self.normalization == 'Instance':
self.norm = nn.InstanceNorm2d(num_features=out_channels, affine=False) # not learnable
        if self.normalization == 'BatchNorm':
            self.norm = nn.BatchNorm2d(num_features=out_channels)
        self.activation = activation()
    def forward(self, x):
x = self.conv(x)
if self.normalization in ['Instance', 'BatchNorm']:
x = self.norm(x)
x = self.activation(x)
return x
class ConvPoolBlock(nn.Module):
r"""Convolutional Block with normalization, activation and pooling.
"""
def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, pooling=True, activation=nn.LeakyReLU):
super(ConvPoolBlock, self).__init__()
self.pooling = pooling
        self.norm = nn.InstanceNorm2d(num_features=out_channels, affine=False) # not learnable
self.activation = activation()
self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding)
self.pool = nn.AvgPool2d(kernel_size=kernel_size)
def forward(self, x):
x = self.norm(x)
x = self.activation(x)
x = self.conv(x)
if self.pooling:
x = self.pool(x)
return x
class ConvBlockBCIN(nn.Module):
r"""Convolutional Block with BCIN normalization and activation.
"""
def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.LeakyReLU, domain_code_size=10, normalization='BCIN'):
super(ConvBlockBCIN, self).__init__()
self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding)
self.norm = BCIN(out_channels, domain_code_size) # not learnable
self.activation = activation()
self.normalization = normalization
def forward(self, x, domain_code):
x = self.conv(x)
if self.normalization == 'BCIN':
x = self.norm(x, domain_code)
x = self.activation(x)
return x, domain_code
class ResBlockIN(nn.Module):
r"""Residual Block consisting of two convolutions with skip connection, instance normalization and activation.
"""
def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.ReLU):
super(ResBlockIN, self).__init__()
self.conv0 = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding)
self.conv1 = nn.Conv2d(in_channels=out_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding)
self.norm0 = nn.InstanceNorm2d(num_features=out_channels, affine=False) # not learnable
self.norm1 = nn.InstanceNorm2d(num_features=out_channels, affine=False) # not learnable
self.activation = activation()
def forward(self, x):
x_in = x
x = self.conv0(x)
x = self.norm0(x)
x = self.activation(x)
x = self.conv1(x)
x = self.norm1(x)
x += self.center_crop(x_in, x)
return x
def center_crop(self, skip_connection, x):
skip_shape = torch.tensor(skip_connection.shape)
x_shape = torch.tensor(x.shape)
crop = skip_shape[2:] - x_shape[2:]
half_crop = crop // 2
# If skip_connection is 10, 20, 30 and x is (6, 14, 12)
# Then pad will be (-2, -2, -3, -3, -9, -9)
pad = -torch.stack((half_crop, half_crop)).t().flatten()
skip_connection = F.pad(skip_connection, pad.tolist())
return skip_connection
class ResBlockBCIN(nn.Module):
r"""Residual Block consisting of two convolutions with skip connection, BCIN normalization and activation.
"""
def __init__(self, in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=0, activation=nn.ReLU, domain_code_size=10, layer_id=0):
super(ResBlockBCIN, self).__init__()
self.conv0 = nn.ConvTranspose2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding)
self.conv1 = nn.ConvTranspose2d(in_channels=out_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding)
self.norm0 = BCIN(num_features=out_channels, domain_code_size=domain_code_size, affine=True) # learnable
self.norm1 = BCIN(num_features=out_channels, domain_code_size=domain_code_size, affine=True) # learnable
self.activation = activation()
self.layer_id = layer_id
def forward(self, x, latent_scale, domain_code):
x_in = x
x = self.conv0(x)
x = torch.mul(x, latent_scale[:,self.layer_id*2,:][:,:,None,None])
x = self.norm0(x, domain_code)
x = self.activation(x)
x = self.conv1(x)
x = torch.mul(x, latent_scale[:,self.layer_id*2+1,:][:,:,None,None])
x = self.norm1(x, domain_code)
x += self.center_crop(x_in, x)
return x, latent_scale, domain_code
def center_crop(self, skip_connection, x):
skip_shape = torch.tensor(skip_connection.shape)
x_shape = torch.tensor(x.shape)
crop = skip_shape[2:] - x_shape[2:]
half_crop = crop // 2
# If skip_connection is 10, 20, 30 and x is (6, 14, 12)
# Then pad will be (-2, -2, -3, -3, -9, -9)
pad = -torch.stack((half_crop, half_crop)).t().flatten()
skip_connection = F.pad(skip_connection, pad.tolist())
return skip_connection
### NORMALIZATION ###
class BCIN(nn.Module):
r"""Central Biasing Instance Normalization
https://arxiv.org/abs/1806.10050
"""
def __init__(self, num_features, domain_code_size, affine=True, instance_norm=False, batch_norm=False):
super(BCIN, self).__init__()
self.W = nn.Parameter(torch.rand(domain_code_size), requires_grad=affine)
self.b = nn.Parameter(torch.rand(1), requires_grad=affine)
self.activation = nn.Tanh()
self.instance_norm = instance_norm
if self.instance_norm:
print('Using instance_norm instead of BCIN')
self.i_norm = torch.nn.InstanceNorm2d(num_features=num_features)
        self.batch_norm = batch_norm
        if self.batch_norm:
            print('Using batch_norm instead of BCIN')
self.b_norm = torch.nn.BatchNorm2d(num_features=num_features)
def forward(self, x, domain_code):
        x_var = torch.sqrt(torch.var(x, (1,2,3))) # per-sample std over (C, H, W)
        x_mean = torch.mean(x, (1,2,3)) # per-sample mean over (C, H, W)
bias = torch.matmul(domain_code, self.W) * self.b
bias_scaled = self.activation(bias)
if self.instance_norm:
return self.i_norm(x)
if self.batch_norm:
return self.b_norm(x)
return ((x-x_mean[:,None,None,None]) / x_var[:,None,None,None]) + bias_scaled[:,None,None,None]
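# --- minimal sketch: per-sample standardization plus a domain-dependent bias ---
#
#   bcin = BCIN(num_features=8, domain_code_size=10)
#   x = torch.rand(4, 8, 16, 16)
#   domain_code = torch.eye(10)[torch.randint(0, 10, (4,))]  # one-hot (4, 10)
#   out = bcin(x, domain_code)  # shape unchanged: (4, 8, 16, 16)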
### HELPER MODULES ###
class MultiInSequential(nn.Sequential):
r"""Sequential class that allows multiple inputs for forward function
"""
def forward(self, *input):
for module in self._modules.values():
input = module(*input)
return input
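if __name__ == '__main__':
    # Minimal smoke test for the multi-input plumbing (shapes are illustrative).
    blocks = MultiInSequential(
        ConvBlockBCIN(in_channels=3, out_channels=8, domain_code_size=10),
        ConvBlockBCIN(in_channels=8, out_channels=8, domain_code_size=10),
    )
    x = torch.rand(2, 3, 32, 32)
    domain_code = torch.eye(10)[torch.randint(0, 10, (2,))]
    out, _ = blocks(x, domain_code)
    print(out.shape)  # torch.Size([2, 8, 28, 28]): two 3x3 valid convolutions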
| [
[
[
7,
12
],
[
8399,
8404
],
[
8453,
8458
],
[
8680,
8685
],
[
12611,
12616
],
[
12665,
12670
],
[
12892,
12897
],
[
14097,
14102
],
[
14278,
14283
],
[
14536,
14541
],
[
14590,
14595
],
[
14817,
14822
],
[
15274,
15279
],
[
15356,
15361
],
[
15583,
15588
],
[
15779,
15784
],
[
15883,
15888
],
[
15894,
15899
],
[
15949,
15954
],
[
16003,
16008
]
],
[
[
20,
34
],
[
787,
789
],
[
2084,
2086
],
[
2822,
2824
],
[
4481,
4483
],
[
5827,
5829
],
[
8865,
8867
],
[
9053,
9055
],
[
9840,
9842
],
[
10051,
10053
],
[
10676,
10678
],
[
10869,
10871
],
[
11497,
11499
],
[
11737,
11739
],
[
13056,
13058
],
[
13292,
13294
],
[
14995,
14997
],
[
16372,
16374
],
[
1535,
1537
],
[
1551,
1553
],
[
1619,
1621
],
[
1670,
1672
],
[
1718,
1720
],
[
1790,
1792
],
[
2283,
2285
],
[
2337,
2339
],
[
2372,
2374
],
[
2419,
2421
],
[
2489,
2491
],
[
2537,
2539
],
[
2573,
2575
],
[
2621,
2623
],
[
2655,
2657
],
[
3440,
3442
],
[
3565,
3567
],
[
3595,
3597
],
[
3712,
3714
],
[
3742,
3744
],
[
3851,
3853
],
[
3881,
3883
],
[
3989,
3991
],
[
4019,
4021
],
[
4118,
4120
],
[
4223,
4225
],
[
5541,
5543
],
[
5611,
5613
],
[
6216,
6218
],
[
6371,
6373
],
[
6437,
6439
],
[
6469,
6471
],
[
6629,
6631
],
[
6695,
6697
],
[
6727,
6729
],
[
6887,
6889
],
[
6953,
6955
],
[
6985,
6987
],
[
7145,
7147
],
[
7211,
7213
],
[
7243,
7245
],
[
7383,
7385
],
[
7429,
7431
],
[
7469,
7471
],
[
7553,
7555
],
[
9157,
9159
],
[
9397,
9399
],
[
9541,
9543
],
[
10164,
10166
],
[
10298,
10300
],
[
10437,
10439
],
[
10994,
10996
],
[
11811,
11813
],
[
11951,
11953
],
[
12093,
12095
],
[
12189,
12191
],
[
13401,
13403
],
[
13550,
13552
],
[
15261,
15263
],
[
15343,
15345
],
[
15419,
15421
]
],
[
[
42,
66
],
[
8756,
8757
],
[
12968,
12969
],
[
14893,
14894
]
],
[
[
115,
121
],
[
161,
167
]
],
[
[
150,
160
],
[
302,
312
]
],
[
[
774,
786
],
[
887,
899
]
],
[
[
2071,
2083
],
[
2233,
2245
]
],
[
[
2812,
2821
],
[
3044,
3053
]
],
[
[
4461,
4480
],
[
4643,
4662
]
],
[
[
5806,
5826
],
[
6001,
6021
]
],
[
[
8855,
8864
],
[
958,
967
],
[
9108,
9117
]
],
[
[
9826,
9839
],
[
1031,
1044
],
[
1114,
1127
],
[
1196,
1209
],
[
1280,
1293
],
[
1363,
1376
],
[
1447,
1460
],
[
10080,
10093
]
],
[
[
10662,
10675
],
[
4700,
4713
],
[
4851,
4864
],
[
4994,
5007
],
[
5150,
5163
],
[
5315,
5328
],
[
10941,
10954
]
],
[
[
11486,
11496
],
[
11761,
11771
]
],
[
[
13043,
13055
],
[
3096,
3108
],
[
3286,
3298
],
[
13349,
13361
]
],
[
[
14990,
14994
],
[
11133,
11137
],
[
13701,
13705
],
[
13814,
13818
],
[
15221,
15225
]
],
[
[
16354,
16371
],
[
4169,
4186
],
[
5491,
5508
]
]
] |
"""Tornado handlers for nbgrader assignment list web service."""
import os
import json
import contextlib
import traceback
from tornado import web
from notebook.utils import url_path_join as ujoin
from nbgrader.exchange import ExchangeFactory
from nbgrader.coursedir import CourseDirectory
from nbgrader.auth import Authenticator
from nbgrader.server_extensions.assignment_list.handlers import (
AssignmentList,
default_handlers,
BaseAssignmentHandler,
)
static = os.path.join(os.path.dirname(__file__), "static")
@contextlib.contextmanager
def chdir(dirname):
    currdir = os.getcwd()
    os.chdir(dirname)
    try:
        yield
    finally:
        # restore the previous working directory even if the body raises
        os.chdir(currdir)
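# Usage sketch (the path is illustrative):
#
#   with chdir("/path/to/course_dir"):
#       ...  # code that assumes the course directory is the cwd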
class E2xAssignmentList(AssignmentList):
def submit_assignment(self, course_id, assignment_id):
with self.get_assignment_dir_config() as config:
try:
config = self.load_config()
config.CourseDirectory.course_id = course_id
config.CourseDirectory.assignment_id = assignment_id
coursedir = CourseDirectory(config=config)
authenticator = Authenticator(config=config)
submit = ExchangeFactory(config=config).Submit(
coursedir=coursedir, authenticator=authenticator, config=config
)
retval = submit.start()
hashcode = "Exchange not set up for hashcode"
timestamp = "Exchange not set up for timestamp"
if retval and len(retval) == 2:
hashcode, timestamp = retval
except Exception:
self.log.error(traceback.format_exc())
retvalue = {"success": False, "value": traceback.format_exc()}
else:
retvalue = {
"success": True,
"hashcode": hashcode,
"timestamp": timestamp,
}
self.log.info(retvalue)
return retvalue
class AssignmentActionHandler(BaseAssignmentHandler):
@web.authenticated
def post(self, action):
if action == "fetch":
assignment_id = self.get_argument("assignment_id")
course_id = self.get_argument("course_id")
self.manager.fetch_assignment(course_id, assignment_id)
self.finish(json.dumps(self.manager.list_assignments(course_id=course_id)))
elif action == "submit":
assignment_id = self.get_argument("assignment_id")
course_id = self.get_argument("course_id")
output = self.manager.submit_assignment(course_id, assignment_id)
if output["success"]:
response = self.manager.list_assignments(course_id=course_id)
response["hashcode"] = output["hashcode"]
response["timestamp"] = output["timestamp"]
self.finish(json.dumps(response))
else:
self.finish(json.dumps(output))
elif action == "fetch_feedback":
assignment_id = self.get_argument("assignment_id")
course_id = self.get_argument("course_id")
self.manager.fetch_feedback(course_id, assignment_id)
self.finish(json.dumps(self.manager.list_assignments(course_id=course_id)))
# -----------------------------------------------------------------------------
# URL to handler mappings
# -----------------------------------------------------------------------------
_assignment_action_regex = r"(?P<action>fetch|submit|fetch_feedback)"
e2x_default_handlers = [
(r"/assignments/%s" % _assignment_action_regex, AssignmentActionHandler),
]
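# e.g. a POST to <base_url>/assignments/submit carrying assignment_id and
# course_id arguments dispatches to AssignmentActionHandler.post(action="submit").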
def load_jupyter_server_extension(nbapp):
"""Load the nbserver"""
nbapp.log.info("Loading the assignment_list e2xgrader serverextension")
webapp = nbapp.web_app
webapp.settings["assignment_list_manager"] = E2xAssignmentList(parent=nbapp)
base_url = webapp.settings["base_url"]
webapp.add_handlers(
".*$",
[
(ujoin(base_url, pat), handler)
for pat, handler in e2x_default_handlers + default_handlers
],
)
| [
[
[
73,
75
],
[
481,
483
],
[
494,
496
],
[
594,
596
],
[
610,
612
],
[
642,
644
]
],
[
[
83,
87
],
[
2320,
2324
],
[
2871,
2875
],
[
2939,
2943
],
[
3208,
3212
]
],
[
[
95,
105
],
[
534,
544
]
],
[
[
113,
122
],
[
1623,
1632
],
[
1702,
1711
]
],
[
[
144,
147
],
[
2034,
2037
]
],
[
[
176,
198
],
[
4001,
4006
]
],
[
[
230,
245
],
[
1156,
1171
]
],
[
[
277,
292
],
[
1039,
1054
]
],
[
[
319,
332
],
[
1102,
1115
]
],
[
[
404,
418
],
[
686,
700
]
],
[
[
424,
440
],
[
4087,
4103
]
],
[
[
446,
467
],
[
2005,
2026
]
],
[
[
472,
478
]
],
[
[
564,
569
]
],
[
[
668,
685
],
[
3863,
3880
]
],
[
[
1981,
2004
],
[
3611,
3634
]
],
[
[
3463,
3487
],
[
3585,
3609
]
],
[
[
3534,
3554
],
[
4064,
4084
]
],
[
[
3645,
3674
]
]
] |
# Copyright 2016 Citrix Systems
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from os_xenapi.client.i18n import _
class OsXenApiException(Exception):
"""Base OsXenapi Exception
To correctly use this class, inherit from it and define
a 'msg_fmt' property. That msg_fmt will get printf'd
with the keyword arguments provided to the constructor.
"""
msg_fmt = _("An unknown exception occurred.")
code = 500
def __init__(self, message=None, **kwargs):
self.kwargs = kwargs
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
if not message:
message = self.msg_fmt % kwargs
self.message = message
super(OsXenApiException, self).__init__(message)
def format_message(self):
# NOTE(mrodden): use the first argument to the python Exception object
# which should be our full NovaException message, (see __init__)
return self.args[0]
class PluginRetriesExceeded(OsXenApiException):
msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.")
class SessionLoginTimeout(OsXenApiException):
msg_fmt = _("Unable to log in to XenAPI (is the Dom0 disk full?)")
| [
[
[
641,
642
],
[
913,
914
],
[
1635,
1636
],
[
1758,
1759
]
],
[
[
651,
668
],
[
1601,
1618
],
[
1724,
1741
],
[
1317,
1334
]
],
[
[
1579,
1600
]
],
[
[
1704,
1723
]
]
] |
import os
import shutil
from multiprocessing.pool import Pool
import cv2
import numpy as np
from functools import partial
from path import Path
def process_scene(input_directory, output_folder):
K = np.array([[525.0, 0.0, 320.0],
[0.0, 525.0, 240.0],
[0.0, 0.0, 1.0]])
print("processing", input_directory)
image_filenames = sorted(input_directory.files("*color.png"))
pose_filenames = sorted(input_directory.files("*pose.txt"))
poses = []
for pose_filename in pose_filenames:
pose = np.loadtxt(pose_filename)
poses.append(pose)
scene = input_directory.split("/")[-2]
seq = input_directory.split("/")[-1]
current_output_dir = output_folder / scene + "-" + seq
if os.path.isdir(current_output_dir):
if os.path.exists("{}/poses.txt".format(current_output_dir)) and os.path.exists("{}/K.txt".format(current_output_dir)):
return scene
else:
shutil.rmtree(current_output_dir)
os.mkdir(current_output_dir)
os.mkdir(os.path.join(current_output_dir, "images"))
output_poses = []
for current_index in range(len(image_filenames)):
image = cv2.imread(image_filenames[current_index])
output_poses.append(poses[current_index].ravel().tolist())
cv2.imwrite("{}/images/{}.png".format(current_output_dir, str(current_index).zfill(6)), image, [cv2.IMWRITE_PNG_COMPRESSION, 3])
output_poses = np.array(output_poses)
np.savetxt("{}/poses.txt".format(current_output_dir), output_poses)
np.savetxt("{}/K.txt".format(current_output_dir), K)
return scene
def main():
input_folder = Path("/home/share/dataset/7scenes")
output_folder = Path("/home/nhsmt1123/master-thesis/deep-video-mvs/data/7scenes")
input_directories = [
input_folder / "redkitchen/seq-01",
input_folder / "redkitchen/seq-07",
input_folder / "chess/seq-01",
input_folder / "chess/seq-02",
input_folder / "heads/seq-02",
input_folder / "fire/seq-01",
input_folder / "fire/seq-02",
input_folder / "office/seq-01",
input_folder / "office/seq-03",
input_folder / "pumpkin/seq-03",
input_folder / "pumpkin/seq-06",
input_folder / "stairs/seq-02",
input_folder / "stairs/seq-06", # train
input_folder / "redkitchen/seq-03",
input_folder / "chess/seq-03",
input_folder / "heads/seq-01",
input_folder / "fire/seq-03",
input_folder / "fire/seq-04",
input_folder / "office/seq-02",
input_folder / "pumpkin/seq-01",
input_folder / "stairs/seq-01"] # test
pool = Pool(6)
for finished_scene in pool.imap_unordered(partial(process_scene, output_folder=output_folder), input_directories):
print("finished", finished_scene)
    pool.close()  # close() must precede join() for multiprocessing.Pool
    pool.join()
if __name__ == '__main__':
main()
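# Note: imap_unordered passes a single argument to the worker, so partial()
# binds output_folder above; finished scenes are reported in completion order,
# not submission order.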
| [
[
[
7,
9
],
[
759,
761
],
[
805,
807
],
[
867,
869
],
[
1012,
1014
],
[
1045,
1047
],
[
1054,
1056
]
],
[
[
17,
23
],
[
973,
979
]
],
[
[
57,
61
],
[
2681,
2685
]
],
[
[
70,
73
],
[
1191,
1194
],
[
1311,
1314
],
[
1407,
1410
]
],
[
[
81,
92
],
[
206,
208
],
[
555,
557
],
[
1460,
1462
],
[
1487,
1489
],
[
1559,
1561
]
],
[
[
115,
122
],
[
2735,
2742
]
],
[
[
140,
144
],
[
1663,
1667
],
[
1719,
1723
]
],
[
[
151,
164
],
[
2743,
2756
]
],
[
[
1636,
1640
],
[
2917,
2921
]
]
] |
import pygame
import math
import glob
import os
tilesize = 128 # pixels per tile
def tiletosurface(tile):
    # TODO: render a single map tile to a pygame.Surface
    pass
def maptosurface(sx, sy, ex, ey, oholmap):
    # TODO: compose the tiles covering (sx, sy)-(ex, ey) into one surface
    pass
def main(windowsize,tilepipe,OHOLMap):
wt = math.floor(windowsize/tilesize)
    cx, cy, first = 0, 0, True
    if OHOLMap.data != {}:
        # centre the view on the first stored tile coordinate
        for x in OHOLMap.data:
            for y in OHOLMap.data[x]:
                cx, cy = x, y
                first = False
                break
            if not first:
                break
print("Loading sprites")
sprites = glob.glob("./OneLifeData/sprites/*.tga")
loadedsprites = {}
print("Found {} sprites, loading...".format(len(sprites)))
for sprite in sprites:
spriteid = os.path.basename(sprite).split(".")[0]
loadedsprites[spriteid] = pygame.image.load(sprite)
# do other loading things...
tilepipe.send("READY")
# main loop goes here
| [
[
[
7,
13
],
[
770,
776
]
],
[
[
21,
25
],
[
214,
218
]
],
[
[
33,
37
],
[
524,
528
]
],
[
[
45,
47
],
[
697,
699
]
],
[
[
48,
56
],
[
236,
244
]
],
[
[
86,
99
]
],
[
[
121,
133
]
],
[
[
170,
174
]
]
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.http import Http404
from core.models import (Category, Article, Source, BaseUserProfile,
BookmarkArticle, ArticleLike, HashTag, Menu, Notification, Devices,
    SocialAccount, CategoryAssociation,
TrendingArticle, Domain, DailyDigest, DraftMedia, Comment,
Subscription)
from rest_framework.authtoken.models import Token
from rest_framework.views import APIView
from .serializers import (CategorySerializer, ArticleSerializer, UserSerializer,
SourceSerializer, LoginUserSerializer, BaseUserProfileSerializer,
BookmarkArticleSerializer, ArticleLikeSerializer, HashTagSerializer,
MenuSerializer, NotificationSerializer, TrendingArticleSerializer,
ArticleCreateUpdateSerializer, DraftMediaSerializer, CommentSerializer,
CommentListSerializer, SubsMediaSerializer, UserProfileSerializer)
from rest_framework.response import Response
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework import filters
from newscout_web.constants import SOCIAL_AUTH_PROVIDERS
from django.db.models import Q
from rest_framework.exceptions import APIException
from collections import OrderedDict
from rest_framework import generics, viewsets
from rest_framework.pagination import CursorPagination
from rest_framework.generics import ListAPIView
from rest_framework.parsers import JSONParser
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
from datetime import datetime, timedelta
from django.db.models import Count, Max, Min
import pytz
import uuid
from core.utils import es, ingest_to_elastic, delete_from_elastic
from elasticsearch_dsl import Search
import math
from rest_framework.utils.urls import replace_query_param
from google.auth.transport import requests as grequests
from google.oauth2 import id_token
import facebook
from .exception_handler import (create_error_response, TokenIDMissing, ProviderMissing,
SocialAuthTokenException)
import logging
import operator
from functools import reduce
import tweepy
import json
from captcha.models import CaptchaStore
from captcha.helpers import captcha_image_url
log = logging.getLogger(__name__)
def create_response(response_data):
"""
method used to create response data in given format
"""
response = OrderedDict()
response["header"] = {"status": "1"}
response["body"] = response_data
return response
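# e.g. create_response({"Msg": "ok"}) returns
# OrderedDict([("header", {"status": "1"}), ("body", {"Msg": "ok"})])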
def create_serializer_error_response(errors):
"""
    method used to create an error response from serializer errors
"""
error_list = []
for k, v in errors.items():
if isinstance(v, dict):
_, v = v.popitem()
d = {}
d["field"] = k
d["field_error"] = v[0]
error_list.append(d)
return OrderedDict({"header": {"status": "0"}, "errors": {
"errorList": error_list}})
class SignUpAPIView(APIView):
permission_classes = (AllowAny,)
def post(self, request, *args, **kwargs):
user_serializer = UserSerializer(data=request.data)
if user_serializer.is_valid():
user_serializer.save()
return Response(create_response({"Msg": "sign up successfully"}))
else:
return Response(
create_serializer_error_response(user_serializer.errors),
status=403)
class LoginFieldsRequired(APIException):
"""
    api exception raised when username or password is missing
"""
status_code = 401
default_detail = ("username and password are required")
default_code = "username_and_password"
class LoginAPIView(generics.GenericAPIView):
serializer_class = LoginUserSerializer
permission_classes = (AllowAny,)
def post(self, request, format=None):
serializer = LoginUserSerializer(data=request.data)
if not serializer.is_valid():
res_data = create_serializer_error_response(serializer.errors)
return Response(res_data, status=403)
user = BaseUserProfile.objects.filter(email=request.data["email"]).first()
device_name = request.data.get("device_name")
device_id = request.data.get("device_id")
if device_id and device_name:
device, _ = Devices.objects.get_or_create(user=user,
device_name=device_name,
device_id=device_id)
notification_obj, _ = Notification.objects.get_or_create(device=device)
notification = NotificationSerializer(notification_obj)
user_serializer = BaseUserProfileSerializer(user)
token, _ = Token.objects.get_or_create(user=user)
data = user_serializer.data
data["token"] = token.key
if device_id and device_name:
data["breaking_news"] = notification.data['breaking_news']
data["daily_edition"] = notification.data['daily_edition']
data["personalized"] = notification.data['personalized']
response_data = create_response({"user": data})
return Response(response_data)
class LogoutAPIView(APIView):
permission_classes = (IsAuthenticated,)
def get(self, request, format=None):
request.user.auth_token.delete()
return Response(create_response({"Msg": "User has been logged out"}))
class UserHashTagAPIView(APIView):
"""
Save new tags and remove older tags based on user selection
"""
permission_classes = (IsAuthenticated,)
parser_classes = (JSONParser,)
def post(self, request, format=None):
user = self.request.user
hash_tags = request.data["tags"]
user_tags = HashTag.objects.filter(name__in=hash_tags)
if user_tags:
user.passion.clear()
user.passion.add(*user_tags)
return Response(create_response({"Msg": "Successfully saved tags"}))
return Response(create_error_response({"Msg": "Invalid tags"}), status=400)
class CategoryListAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs):
"""
List all news category
"""
categories = CategorySerializer(Category.objects.all(), many=True)
return Response(create_response({"categories": categories.data}))
def post(self, request, format=None):
"""
Save new category to database
"""
if request.user.is_authenticated:
serializer = CategorySerializer(data=request.data, many=True)
if serializer.is_valid():
serializer.save()
return Response(create_response(serializer.data))
return Response(create_error_response(serializer.errors), status=400)
raise Http404
def put(self, request, format=None):
"""
update category in database
"""
if request.user.is_authenticated:
_id = request.data.get("id")
category = Category.objects.get(id=_id)
serializer = CategorySerializer(category, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(create_response(serializer.data))
return Response(create_error_response(serializer.errors), status=400)
raise Http404
class SourceListAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs):
"""
List all the sources
"""
source = SourceSerializer(Source.objects.all(), many=True)
return Response(create_response({"results": source.data}))
class NoarticleFound(APIException):
"""
    api exception for no article found
"""
status_code = 404
default_detail = ("Article does not exist")
default_code = "no_article_found"
class PostpageNumberPagination(CursorPagination):
page_size = 10
page_size_query_param = 'page_size'
ordering = '-created_at'
class ArticleListAPIView(ListAPIView):
serializer_class = ArticleSerializer
permission_classes = (AllowAny,)
pagination_class = PostpageNumberPagination
filter_backends = (filters.OrderingFilter,)
ordering = ('-published_on',)
def get_queryset(self):
q = self.request.GET.get("q", "")
tag = self.request.GET.getlist("tag", "")
category = self.request.GET.getlist("category", "")
source = self.request.GET.getlist("source", "")
queryset = Article.objects.all()
if self.request.user.domain:
queryset = queryset.filter(domain=self.request.user.domain)
else:
queryset = Article.objects.none()
if source:
queryset = queryset.filter(source__name__in=source)
if category:
queryset = queryset.filter(category__name__in=category)
if tag:
queryset = queryset.filter(hash_tags__name__in=tag)
if q:
q_list = q.split(" ")
condition_1 = reduce(operator.or_, [Q(title__icontains=s) for s in q_list])
condition_2 = reduce(operator.or_, [Q(full_text__icontains=s) for s in q_list])
queryset = queryset.filter(condition_1 | condition_2)
return queryset
def list(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
if serializer.data:
paginated_response = self.get_paginated_response(serializer.data)
return Response(create_response(paginated_response.data))
else:
return Response(create_error_response({"Msg": "News Doesn't Exist"}), status=400)
class ArticleDetailAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs):
slug = self.kwargs.get("slug", "")
user = self.request.user
article = Article.objects.filter(slug=slug).first()
has_subscribed = False
if not self.request.user.is_anonymous and \
Subscription.objects.filter(
                    user=self.request.user).exclude(subs_type='Basic').exists():
has_subscribed = True
        try:
            next_article = Article.objects.filter(id__gt=article.id).order_by("id")[0:1].get().slug
        except Exception as error:
            print(error)
            # no newer article: wrap around to the first one
            next_article = Article.objects.order_by("id").first().slug
        try:
            prev_article = Article.objects.filter(id__lt=article.id).order_by("-id")[0:1].get().slug
        except Exception as error:
            print(error)
            # no older article: wrap around to the last one
            prev_article = Article.objects.order_by("-id").first().slug
if article:
response_data = ArticleSerializer(article, context={
"hash_tags_list": True, 'has_subscribed': has_subscribed}).data
if not user.is_anonymous:
book_mark_article = BookmarkArticle.objects.filter(
user=user, article=article).first()
like_article = ArticleLike.objects.filter(
user=user, article=article).first()
if book_mark_article:
response_data["isBookMark"] = True
else:
response_data["isBookMark"] = False
if like_article:
response_data["isLike"] = like_article.is_like
else:
response_data["isLike"] = 2
return Response(create_response({
"article": response_data, "next_article": next_article, "prev_article": prev_article}))
raise NoarticleFound
def post(self, request, *args, **kwargs):
if request.user.is_authenticated:
article_id = self.request.POST.get("article_id", "")
is_like = self.request.POST.get("isLike", "")
user = self.request.user
article = Article.objects.filter(id=article_id).first()
if article:
if is_like and int(is_like) <= 2:
article_like, created = ArticleLike.objects.get_or_create(
user=user, article=article)
article_like.is_like = is_like
article_like.save()
serializer = ArticleLikeSerializer(article_like)
return Response(create_response({
"Msg": "Article like status changed", "article": serializer.data
}))
else:
return Response(create_error_response({
"Msg": "Invalid Input"
}))
else:
return Response(create_error_response({"Msg": "News doesn't exist"}), status=400)
raise Http404
class ArticleBookMarkAPIView(APIView):
permission_classes = (IsAuthenticated,)
def post(self, request, *args, **kwargs):
if request.data:
article_id = request.data["article_id"]
else:
article_id = self.request.POST.get("article_id", "")
user = self.request.user
if article_id:
article = Article.objects.filter(id=article_id).first()
if article:
bookmark_article, created = \
BookmarkArticle.objects.get_or_create(user=user,
article=article)
if not created:
del_bookmark_article = BookmarkArticleSerializer(bookmark_article)
del_bookmark = del_bookmark_article.data
del_bookmark["status"] = 0
bookmark_article.delete()
return Response(create_response({
"Msg": "Article removed from bookmark list", "bookmark_article": del_bookmark
}))
else:
bookmark_article = BookmarkArticleSerializer(bookmark_article)
return Response(create_response({
"Msg": "Article bookmarked successfully", "bookmark_article": bookmark_article.data
}))
raise NoarticleFound
class ArticleRecommendationsAPIView(APIView):
permission_classes = (AllowAny,)
def format_response(self, response):
results = []
if response['hits']['hits']:
for result in response['hits']['hits']:
results.append(result["_source"])
return results
def get(self, request, *args, **kwargs):
article_id = self.kwargs.get("article_id", "")
if article_id:
results = es.search(index='recommendation', body={"query": {"match": {"id": int(article_id)}}})
if results['hits']['hits']:
recommendation = results['hits']['hits'][0]['_source']['recommendation']
search_results = es.search(index='article', body={
"query": {"terms": {"id": recommendation}}, "size": 25})
return Response(create_response({
"results": self.format_response(search_results)
}))
return Response(create_error_response({
"Msg": "Error generating recommendation"
}))
class ForgotPasswordAPIView(APIView):
permission_classes = (AllowAny,)
    def generate_password(self, password_length=10):
        """
        Returns a random password of length password_length.
        """
        random = str(uuid.uuid4())
        random = random.upper()
        random = random.replace("-", "")
        return random[0:password_length]
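    # e.g. self.generate_password(8) might return "3F2A9C1B"
    # (upper-cased hexadecimal characters taken from a UUID4).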
def send_mail_to_user(self, email, password, first_name="", last_name=""):
username = first_name + " " + last_name
email_subject = 'NewsPost: Forgot Password Request'
email_body = """
<html>
<head>
</head>
<body>
<p>
Hello """ + username + """,<br><br><b>
""" + password + """</b> is your new password
<br>
<br>
Thanks,<br>
The NewsPost Team<br>
</p>
</body>
</html>"""
msg = EmailMultiAlternatives(
email_subject, '', settings.EMAIL_FROM, [email])
ebody = email_body
msg.attach_alternative(ebody, "text/html")
msg.send(fail_silently=False)
def post(self, request, *args, **kwargs):
email = request.data["email"]
if email:
user = BaseUserProfile.objects.filter(email=email)
if user:
user = user.first()
                password = self.generate_password()
self.send_mail_to_user(
email, password, user.first_name, user.last_name)
user.set_password(password)
user.save()
return Response(create_response({
"Msg": "New password sent to your email"
}))
return Response(create_error_response({
"Msg": "Email Does Not Exist"
}))
class ChangePasswordAPIView(APIView):
permission_classes = (IsAuthenticated,)
def post(self, request, *args, **kwargs):
if request.data:
password = request.data["password"]
old_password = request.data["old_password"]
confirm_password = request.data["confirm_password"]
else:
password = self.request.POST.get("password", "")
old_password = self.request.POST.get("old_password", "")
confirm_password = self.request.POST.get("confirm_password", "")
user = self.request.user
if old_password:
if not user.check_password(old_password):
msg = "Old Password Does Not Match With User"
return Response(create_error_response({
"Msg": msg, "field": "old_password"
}))
if confirm_password != password:
msg = "Password and Confirm Password does not match"
return Response(create_error_response({
"Msg": msg, "field": "confirm_password"
}))
if old_password == password:
msg = "New password should not same as Old password"
return Response(create_error_response({
"Msg": msg, "field": "password"
}))
if user and password:
user.set_password(password)
user.save()
return Response(create_response({
"Msg": "Password changed successfully", "field": "confirm_password"
}))
else:
return Response(create_error_response({
"Msg": "Password field is required", "field": "password"
}))
else:
return Response(create_error_response({
"Msg": "Old Password field is required", "field": "old_password"
}))
class BookmarkArticleAPIView(APIView):
"""
This class is used to get user bookmark list
"""
permission_classes = (IsAuthenticated,)
def get(self, request):
user = self.request.user
bookmark_list = BookmarkArticleSerializer(BookmarkArticle.objects.filter(user=user), many=True)
return Response(create_response({"results": bookmark_list.data}))
class ArticleLikeAPIView(APIView):
"""
This class is used to get user articles
"""
permission_classes = (IsAuthenticated,)
def get(self, request):
like_list = [0, 1]
user = self.request.user
article_list = ArticleLikeSerializer(ArticleLike.objects.filter(user=user, is_like__in=like_list), many=True)
return Response(create_response({"results": article_list.data}))
class HashTagAPIView(ListAPIView):
serializer_class = HashTagSerializer
permission_classes = (AllowAny,)
def get_queryset(self):
weekly = self.request.GET.get("weekly", "")
monthly = self.request.GET.get("monthly", "")
end = datetime.utcnow()
        pst = pytz.timezone('Asia/Kolkata')  # IST, despite the variable name
end = pst.localize(end)
utc = pytz.UTC
end = end.astimezone(utc)
articles = Article.objects.all()
queryset = HashTag.objects.all()
if weekly:
weekly = int(weekly)
start = end - timedelta(days=7 * weekly)
hash_tags = articles.filter(published_on__range=(start, end)).values(
'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10]
for hashtag in hash_tags:
hashtag['name'] = hashtag.pop('hash_tags__name')
queryset = hash_tags
if monthly:
monthly = int(monthly)
start = end - timedelta(days=30 * monthly)
hash_tags = articles.filter(published_on__range=(start, end)).values(
'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10]
for hashtag in hash_tags:
hashtag['name'] = hashtag.pop('hash_tags__name')
queryset = hash_tags
if not weekly and not monthly:
start = end - timedelta(days=1)
hash_tags = articles.filter(published_on__range=(start, end)).values(
'hash_tags__name').annotate(count=Count('hash_tags')).order_by('-count')[:10]
for hashtag in hash_tags:
hashtag['name'] = hashtag.pop('hash_tags__name')
queryset = hash_tags
return queryset
def list(self, request, *args, **kwargs):
queryset = self.filter_queryset(self.get_queryset())
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
if serializer.data:
paginated_response = self.get_paginated_response(serializer.data)
return Response(create_response(paginated_response.data))
else:
return Response(create_error_response({"Msg": "No trending tags"}), status=400)
serializer = self.get_serializer(queryset, many=True)
return Response(create_response(serializer.data))
class ArticleSearchAPI(APIView):
"""
this view is used for article search and filter
"""
permission_classes = (AllowAny,)
def format_response(self, response):
results = []
filters = {}
if response.hits.hits:
for result in response.hits.hits:
source = result["_source"]
if 'highlight' in result:
if 'title' in result['highlight']:
source['title'] = " ".join(result['highlight']['title'])
if 'blurb' in result['highlight']:
source['blurb'] = " ".join(result['highlight']['blurb'])
results.append(source)
if response.aggregations.category.buckets:
filters["category"] = sorted(
response.aggregations.category.buckets._l_,
key=operator.itemgetter("key"))
if response.aggregations.source.buckets:
filters["source"] = sorted(
response.aggregations.source.buckets._l_,
key=operator.itemgetter("key"))
if response.aggregations.hash_tags.buckets:
filters["hash_tags"] = sorted(
response.aggregations.hash_tags.buckets._l_,
key=operator.itemgetter("key"))
return results, filters
def get(self, request):
page = self.request.GET.get("page", "1")
if page.isdigit():
page = int(page)
else:
page = 1
size = self.request.GET.get("rows", "20")
if size.isdigit():
size = int(size)
else:
size = 20
query = self.request.GET.get("q", "")
source = self.request.GET.getlist("source", [])
category = self.request.GET.getlist("category", [])
domain = self.request.GET.getlist("domain", [])
tags = self.request.GET.getlist("tag", [])
sort = self.request.GET.get("sort", "desc")
if not domain:
return Response(create_serializer_error_response({"domain": ["Domain id is required"]}))
        # more_like_this query for related articles
mlt_fields = ["has_tags"]
if source:
mlt_fields = ["has_tags", "source", "domain"]
mlt = Search(using=es, index="article").query("more_like_this", fields=mlt_fields,
like=query, min_term_freq=1, max_query_terms=12).source(mlt_fields)
mlt.execute()
sr = Search(using=es, index="article")
# highlight title and blurb containing query
sr = sr.highlight("title", "blurb", fragment_size=20000)
# generate elastic search query
must_query = [{"wildcard": {"cover_image": "*"}}]
should_query = []
if query:
query = query.lower()
must_query.append({"multi_match": {"query": query,
"fields": ["title", "blurb"], 'type': 'phrase'}})
if tags:
tags = [tag.lower().replace("-", " ") for tag in tags]
for tag in tags:
sq = {"match_phrase": {"hash_tags": tag}}
should_query.append(sq)
if must_query:
sr = sr.query("bool", must=must_query)
if should_query:
if len(should_query) > 1:
sr = sr.filter("bool", should=should_query)
else:
sr = sr.filter("bool", should=should_query[0])
if domain:
sr = sr.filter("terms", domain=list(domain))
if category:
cat_objs = Category.objects.filter(name__in=category)
category = cat_objs.values_list("id", flat=True)
cat_assn_objs = CategoryAssociation.objects.filter(
parent_cat__in=cat_objs).values_list(
"child_cat__id", flat=True)
if cat_assn_objs:
new_category = set(list(cat_assn_objs) + list(category))
sr = sr.filter("terms", category_id=list(new_category))
else:
if category:
sr = sr.filter("terms", category_id=list(category))
if source:
source = [s.lower() for s in source]
sr = sr.filter("terms", source__keyword=source)
sr = sr.sort({"article_score": {"order": sort}})
sr = sr.sort({"published_on": {"order": sort}})
# pagination
start = (page - 1) * size
end = start + size
sr = sr[start:end]
# generate facets
sr.aggs.bucket("category", "terms", field="category.keyword")
sr.aggs.bucket("source", "terms", field="source.keyword")
sr.aggs.bucket("hash_tags", "terms", field="hash_tags.keyword", size=50)
# execute query
response = sr.execute()
results, filters = self.format_response(response)
count = response["hits"]["total"]
total_pages = math.ceil(count / size)
url = request.build_absolute_uri()
if end < count:
next_page = page + 1
next_url = replace_query_param(url, "page", next_page)
else:
next_url = None
if page != 1:
previous_page = page - 1
previous_url = replace_query_param(url, "page", previous_page)
else:
previous_url = None
data = {
"results": results,
"filters": filters,
"count": count,
"total_pages": total_pages,
"current_page": page,
"next": next_url,
"previous": previous_url
}
return Response(create_response(data))
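# e.g. (the URL prefix depends on the project's urls.py, so the path below is
# illustrative):
#   GET /api/.../search/?q=election&domain=1&category=Politics&page=2&rows=10
# responds with results, facet filters, count, total_pages and next/previous
# links, wrapped in the create_response envelope.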
class MenuAPIView(APIView):
"""
This Api will return all the menus
"""
permission_classes = (AllowAny,)
def get(self, request):
domain_id = self.request.GET.get("domain")
if not domain_id:
return Response(create_error_response({"domain": ["Domain id is required"]}))
domain = Domain.objects.filter(domain_id=domain_id).first()
if not domain:
return Response(create_error_response({"domain": ["Domain id is required"]}))
menus = MenuSerializer(Menu.objects.filter(domain=domain), many=True)
menus_list = menus.data
new_menulist = []
for menu in menus_list:
menu_dict = {}
menu_dict['heading'] = menu
new_menulist.append(menu_dict)
return Response(create_response({'results': new_menulist}))
class DevicesAPIView(APIView):
"""
this api will add device_id and device_name
"""
permission_classes = (IsAuthenticated,)
def post(self, request, *args, **kwargs):
user = self.request.user
device_id = self.request.POST.get("device_id", "")
device_name = self.request.POST.get("device_name", "")
if not user.is_anonymous and device_id and device_name:
user_device = Devices.objects.filter(user=user.pk)
if user_device:
user_device.update(device_id=device_id, device_name=device_name, user=user.id)
return Response(create_response({"Msg": "Device successfully created"}))
elif not user_device:
get, created = Devices.objects.get_or_create(device_id=device_id, device_name=device_name, user=user.id)
if created:
return Response(create_response({"Msg": "Device successfully created"}))
else:
return Response(create_response({"Msg": "Device already exist"}))
elif device_id and device_name:
get, created = Devices.objects.get_or_create(device_id=device_id, device_name=device_name)
if created:
return Response(create_response({"Msg": "Device successfully created"}))
else:
return Response(create_response({"Msg": "Device already exist"}))
else:
return Response(create_error_response({"Msg": "device_id and device_name field are required"}))
class NotificationAPIView(APIView):
"""
this api will add notification data
"""
permission_classes = (AllowAny,)
def post(self, request):
device_id = request.data["device_id"]
device_name = request.data["device_name"]
breaking_news = request.data["breaking_news"]
daily_edition = request.data["daily_edition"]
personalized = request.data["personalized"]
device = Devices.objects.get(device_id=device_id, device_name=device_name)
if breaking_news and daily_edition and personalized and device:
notification = Notification.objects.filter(device=device)
if notification:
notification.update(breaking_news=breaking_news, daily_edition=daily_edition, personalized=personalized)
return Response(create_response({"Msg": "Notification updated successfully"}))
Notification.objects.create(breaking_news=breaking_news, daily_edition=daily_edition,
personalized=personalized, device=device)
return Response(create_response({"Msg": "Notification created successfully"}))
else:
return Response(
create_error_response(
{"Msg": "device_id, device_name, breaking_news, daily_edition and personalized are required"}))
def get(self, request):
device_id = request.GET.get("device_id")
device_name = request.GET.get("device_name")
device = Devices.objects.filter(device_id=device_id, device_name=device_name).first()
if device:
notification = NotificationSerializer(Notification.objects.filter(device=device), many=True)
return Response(create_response(notification.data))
return Response(create_error_response({"Msg": "Invalid device_id or device_name"}))
class SocialLoginView(generics.GenericAPIView):
"""
this view is used for google social authentication and login
"""
permission_classes = (AllowAny,)
serializer_class = BaseUserProfileSerializer
def decode_google_token(self, token_id):
"""
this method is used to decode and verify google token
"""
request = grequests.Request()
try:
id_info = id_token.verify_oauth2_token(token_id, request)
return id_info
except Exception as e:
log.debug("error in google token verification {0}".format(e))
return False
def get_name_details(self, id_info):
"""
this method is used to get first name and last name from id_info
details
"""
first_name = last_name = ""
if "name" in id_info:
name = id_info.get("name")
name_list = name.split(" ")
first_name = name_list[0]
if len(name_list) > 1:
last_name = " ".join(name_list[1:])
if not first_name:
if "given_name" in id_info:
first_name = id_info.get("given_name")
if not last_name:
if "family_name" in id_info:
last_name = id_info.get("family_name")
return first_name, last_name
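# For example (hypothetical id_info): {"name": "Ada King Lovelace"} yields
# first_name "Ada" and last_name "King Lovelace"; "given_name"/"family_name"
# are only consulted as fallbacks when "name" is missing or has no surname.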
def create_user_profile(self, first_name, last_name, username, email, image_url, sid, provider):
"""
this method is used to create base user profile object for given
social account
"""
user = BaseUserProfile.objects.filter(email=email).first()
created = ""
if not user:
user = BaseUserProfile.objects.create(
first_name=first_name,
last_name=last_name,
email=email,
username=username
)
sa_obj, created = SocialAccount.objects.get_or_create(
social_account_id=sid,
image_url=image_url,
user=user,
provider=provider
)
# create_profile_image.delay(sa_obj.id)
return user, created
def get_facebook_data(self, token_id):
"""
this method is used to get facebook user data from given access token
"""
graph = facebook.GraphAPI(access_token=token_id)
try:
res_data = graph.get_object(
id='me?fields=email,id,first_name,last_name,name,picture.width(150).height(150)')
return res_data
except Exception as e:
log.debug("error in facebook fetch data: {0}".format(e))
return False
def get_facebook_name_details(self, profile_data):
"""
this method is used to get facebook first_name last_name from profile
data
"""
name = first_name = last_name = ""
if "first_name" in profile_data:
first_name = profile_data.get("first_name")
if "last_name" in profile_data:
last_name = profile_data.get("last_name")
if "name" in profile_data:
name = profile_data.get("name")
name_list = name.split(" ")
if not first_name:
first_name = name_list[0]
if not last_name:
last_name = " ".join(name[1:])
return first_name, last_name
def get_user_serialize_data(self, email, device_id, device_name):
"""
this method will return customize user data
"""
user = BaseUserProfile.objects.filter(email=email).first()
device = Devices.objects.filter(user=user.id)
if device:
device.update(device_name=device_name, device_id=device_id)
else:
device, created = Devices.objects.get_or_create(device_name=device_name, device_id=device_id)
Devices.objects.filter(pk=device.pk).update(user=user)
Notification.objects.get_or_create(device=device)
notification = NotificationSerializer(Notification.objects.filter(device=device), many=True)
token, _ = Token.objects.get_or_create(user=user)
data = BaseUserProfileSerializer(user).data
data["token"] = token.key
data["breaking_news"] = notification.data[0]['breaking_news']
data["daily_edition"] = notification.data[0]['daily_edition']
data["personalized"] = notification.data[0]['personalized']
return data
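# The dict returned above is the serialized user plus extras keyed off the code
# above: "token" (the DRF token key) and the "breaking_news", "daily_edition"
# and "personalized" flags taken from the device's Notification record.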
def post(self, request, *args, **kwargs):
"""
this is the POST method for collecting social auth data
and generate authentication api token for user
"""
token_id = request.data.get("token_id")
provider = request.data.get("provider")
device_id = request.data.get("device_id")
device_name = request.data.get("device_name")
if not token_id:
raise TokenIDMissing()
if not provider:
raise ProviderMissing()
if not device_id:
return Response(create_error_response({"Msg": "device_id is missing or Invalid device_id"}))
if not device_name:
return Response(create_error_response({"Msg": "device_name is missing or Invalid device_name"}))
if provider not in SOCIAL_AUTH_PROVIDERS:
raise ProviderMissing()
if provider == "google":
id_info = self.decode_google_token(token_id)
if not id_info:
raise SocialAuthTokenException()
first_name, last_name = self.get_name_details(id_info)
email = id_info.get("email", "")
if not email:
raise SocialAuthTokenException()
username = email.split("@")[0]
google_id = id_info.get("sub", "")
image_url = id_info.get("picture", "")
user, created = self.create_user_profile(
first_name, last_name, username, email, image_url, google_id, provider)
user_data = self.get_user_serialize_data(email, device_id, device_name)
return Response(create_response({"user": user_data}))
if provider == "facebook":
profile_data = self.get_facebook_data(token_id)
if not profile_data:
raise SocialAuthTokenException()
first_name, last_name = self.get_facebook_name_details(
profile_data)
email = profile_data.get("email")
if not email:
raise SocialAuthTokenException()
username = username = email.split("@")[0]
facebook_id = profile_data.get("id", "")
image_url = ""
if "picture" in profile_data:
if "data" in profile_data["picture"]:
image_url = profile_data["picture"]["data"]["url"]
user, created = self.create_user_profile(
first_name, last_name, username, email, image_url, facebook_id, provider)
user_data = self.get_user_serialize_data(email, device_id, device_name)
return Response(create_response({"user": user_data}))
raise ProviderMissing()
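# A minimal request body SocialLoginView.post expects (values hypothetical):
# {
#     "token_id": "<google-or-facebook-token>",
#     "provider": "google",          # must appear in SOCIAL_AUTH_PROVIDERS
#     "device_id": "abc123",
#     "device_name": "Pixel-7"
# }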
class TrendingArticleAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request, format=None, *args, **kwargs):
"""
List all the trending articles
"""
domain_id = self.request.GET.get("domain")
if not domain_id:
return Response(create_error_response({"domain": ["Domain id is required"]}))
domain = Domain.objects.filter(domain_id=domain_id).first()
if not domain:
return Response(create_error_response({"domain": ["Invalid domain name"]}))
source = TrendingArticleSerializer(TrendingArticle.objects.filter(domain=domain), many=True)
return Response(create_response({"results": source.data}))
class SocialMediaPublishing():
"""
this class publishes news articles to social media
"""
def twitter(self, data):
"""
this function will tweet article title and its url in twitter
"""
try:
auth = tweepy.OAuthHandler(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET)
auth.set_access_token(settings.TWITTER_ACCESS_TOKEN, settings.TWITTER_ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
api.update_status(data["title"] + "\n" + data["url"])
except Exception as e:
print("Error in twitter post: ", e)
class ArticleCreateUpdateView(APIView, SocialMediaPublishing):
"""
Article create update view
"""
permission_classes = (IsAuthenticated,)
def get_tags(self, tags):
"""
this method will return tag name from tags objects
"""
tag_list = []
for tag in tags:
tag_list.append(tag["name"])
return tag_list
def publish(self, obj):
serializer = ArticleSerializer(obj)
json_data = serializer.data
if json_data["hash_tags"]:
tag_list = self.get_tags(json_data["hash_tags"])
json_data["hash_tags"] = tag_list
ingest_to_elastic([json_data], "article", "article", "id")
tweet_data = {
"title": serializer.instance.title,
"url": serializer.instance.source_url,
}
self.twitter(tweet_data)
def post(self, request):
publish = request.data.get("publish")
# origin is used to join with cover image
# to generate proper image url
origin = request.META.get("HTTP_ORIGIN")
cover_image_id = request.data.get("cover_image_id")
if cover_image_id:
DraftMedia.objects.filter(id=cover_image_id).delete()
if not request.data.get("cover_image"):
request.data["cover_image"] = "/".join(
[origin, request.user.domain.default_image.url])
context = {"publish": publish, "user": request.user}
serializer = ArticleCreateUpdateSerializer(
data=request.data, context=context)
if serializer.is_valid():
serializer.save()
if publish:
self.publish(serializer.instance)
return Response(create_response(serializer.data))
return Response(create_error_response(serializer.errors), status=400)
def put(self, request):
_id = request.data.get("id")
publish = request.data.get("publish")
# origin is used to join with cover image
# to generate proper image url
origin = request.META.get("HTTP_ORIGIN")
cover_image_id = request.data.get("cover_image_id")
if cover_image_id:
DraftMedia.objects.filter(id=cover_image_id).delete()
if not request.data.get("cover_image"):
request.data["cover_image"] = "/".join(
[origin, request.user.domain.default_image.url])
context = {"publish": publish, "user": request.user}
article = Article.objects.get(id=_id)
serializer = ArticleCreateUpdateSerializer(
article, data=request.data, context=context)
if serializer.is_valid():
serializer.save()
if publish:
self.publish(serializer.instance)
return Response(create_response(serializer.data))
return Response(create_error_response(serializer.errors), status=400)
class ChangeArticleStatusView(APIView, SocialMediaPublishing):
"""
this view is used to update status of given article activate or deactivate
"""
permission_classes = (IsAuthenticated,)
def get_tags(self, tags):
"""
this method will return tag name from tags objects
"""
tag_list = []
for tag in tags:
tag_list.append(tag["name"])
return tag_list
def publish(self, obj):
serializer = ArticleSerializer(obj)
json_data = serializer.data
if obj.active:
if json_data["hash_tags"]:
tag_list = self.get_tags(json_data["hash_tags"])
json_data["hash_tags"] = tag_list
ingest_to_elastic([json_data], "article", "article", "id")
tweet_data = {
"title": serializer.instance.title,
"url": serializer.instance.source_url,
}
self.twitter(tweet_data)
else:
delete_from_elastic([json_data], "article", "article", "id")
def post(self, request):
_id = request.data.get("id")
article = Article.objects.filter(id=_id).first()
if not article:
return Response(create_error_response({"error": "Article does not exists"}), status=400)
article.active = request.data.get("activate")
article.save()
self.publish(article)
return Response(create_response({
"id": article.id, "active": article.active}))
class CategoryBulkUpdate(APIView):
"""
update whole bunch of articles in one go
"""
permission_classes = (IsAuthenticated,)
def get_tags(self, tags):
"""
this method will return tag name from tags objects
"""
tag_list = []
for tag in tags:
tag_list.append(tag["name"])
return tag_list
def post(self, request):
category_id = request.data['categories']
category = Category.objects.get(id=category_id)
for article_id in request.data['articles']:
current = Article.objects.get(id=article_id)
current.category = category
current.save()
serializer = ArticleSerializer(current)
json_data = serializer.data
delete_from_elastic([json_data], "article", "article", "id")
if json_data["hash_tags"]:
tag_list = self.get_tags(json_data["hash_tags"])
json_data["hash_tags"] = tag_list
ingest_to_elastic([json_data], "article", "article", "id")
return Response({"ok": "cool"})
class GetDailyDigestView(ListAPIView):
serializer_class = ArticleSerializer
permission_classes = (AllowAny,)
def format_response(self, response):
results = []
if response.hits.hits:
for result in response.hits.hits:
results.append(result["_source"])
return results
def get_queryset(self):
device_id = self.request.GET.get("device_id", "")
queryset = Devices.objects.filter(device_id=device_id)
dd = DailyDigest.objects.filter(device__in=queryset)
if not queryset.exists() or not dd.exists():
return []
return dd.first().articles.all().order_by("-published_on")
def list(self, request, *args, **kwargs):
queryset = self.get_queryset()
if not queryset:
sr = Search(using=es, index="article")
sort = "desc"
sr = sr.sort({"article_score": {"order": sort}})
sr = sr.sort({"published_on": {"order": sort}})
sr = sr[0:20]
response = sr.execute()
results = self.format_response(response)
return Response(create_response({"results": results}))
serializer = self.get_serializer(queryset, many=True)
if serializer.data:
return Response(create_response(serializer.data))
else:
return Response(create_error_response({"Msg": "Daily Digest Doesn't Exist"}), status=400)
class DraftMediaUploadViewSet(viewsets.ViewSet):
"""
this view is used to upload article images
"""
permission_classes = (IsAuthenticated,)
def create(self, request):
image_file = request.data.get("image")
if not image_file:
return Response(create_error_response({"error": "Image file is required."}))
draft_image = DraftMedia.objects.create(image=image_file)
serializer = DraftMediaSerializer(draft_image)
return Response(create_response(serializer.data))
def update(self, request, pk):
image_file = request.data.get("image")
if not image_file:
return Response(create_error_response({"error": "Image file is required."}))
draft_image = DraftMedia.objects.filter(id=pk).first()
if not draft_image:
raise Http404
draft_image.image = image_file
draft_image.save()
serializer = DraftMediaSerializer(draft_image)
return Response(create_response(serializer.data))
def destroy(self, request, pk):
draft_image = DraftMedia.objects.filter(id=pk).first()
if not draft_image:
raise Http404
draft_image.delete()
return Response(create_response({"Msg": "Image deleted successfully"}))
class CommentViewSet(viewsets.ViewSet):
serializer_class = CommentSerializer
permission_classes = (IsAuthenticated,)
pagination_class = PostpageNumberPagination
ordering = "-created_at"
def get_permissions(self):
"""
Instantiates and returns the list of permissions that this view requires.
"""
if self.action == 'list':
self.permission_classes = [AllowAny]
else:
self.permission_classes = [IsAuthenticated]
return [permission() for permission in self.permission_classes]
def create(self, request):
captcha_key = request.data.get("captcha_key")
captcha_value = request.data.get("captcha_value")
captcha = CaptchaStore.objects.filter(hashkey=captcha_key).first()
if not captcha:
return Response(create_error_response({"error": "Invalid Captcha"}))
if not captcha_value or captcha.response != captcha_value.lower():
return Response(create_error_response({"error": "Invalid Captcha"}))
data = request.data.copy()
data["user"] = request.user.id
serializer = CommentSerializer(data=data)
if serializer.is_valid():
serializer.save()
return Response(create_response({"result": serializer.data}))
return Response(create_error_response({"error": "Enter Valid data"}))
def list(self, request):
article_id = request.GET.get("article_id", "")
if not article_id:
return Response(
create_error_response(
{"error": "Article ID has not been entered by the user"}
)
)
article_obj = Article.objects.filter(id=article_id).first()
if not article_obj:
return Response(create_error_response({"error": "Article does not exist"})
)
comment_list = Comment.objects.filter(article=article_obj, reply=None)
serializer = CommentSerializer(comment_list, many=True)
return Response(
create_response(
{"results": serializer.data, "total_article_likes": ArticleLike.objects.filter(
article=article_obj).count()}))
def destroy(self, request, pk):
comment_obj = Comment.objects.filter(id=pk)
if not comment_obj:
return Response(create_error_response({"error": "Comment does not exist"}))
comment_obj.delete()
return Response(create_response({"Msg": "Comment deleted successfully"}))
class LikeAPIView(APIView):
permission_classes = (IsAuthenticated,)
pagination_class = PostpageNumberPagination
ordering = "-created_at"
def post(self, request):
post_data = request.data.copy()
post_data["user"] = request.user.id
serializer = ArticleLikeSerializer(data=post_data)
if serializer.is_valid():
serializer.save()
if serializer.data.get("id"):
return Response(create_response({"Msg": "Liked"}))
return Response(create_response({"Msg": "Removed Like"}))
return Response(create_error_response({"error": "Invalid Data Entered"}))
class CaptchaCommentApiView(APIView):
permission_classes = (IsAuthenticated,)
def get(self, request):
captcha_len = CaptchaStore.objects.count()
if captcha_len > 500:
captcha = CaptchaStore.objects.order_by('?')[:1]
to_json_response = dict()
to_json_response['status'] = 1
to_json_response['new_captch_key'] = captcha[0].hashkey
to_json_response['new_captch_image'] = captcha_image_url(to_json_response['new_captch_key'])
return Response(create_response({"result": to_json_response}))
else:
to_json_response = dict()
to_json_response['status'] = 1
to_json_response['new_captch_key'] = CaptchaStore.generate_key()
to_json_response['new_captch_image'] = captcha_image_url(to_json_response['new_captch_key'])
return Response(create_response({"result": to_json_response}))
class AutoCompleteAPIView(generics.GenericAPIView):
permission_classes = (AllowAny,)
def format_response(self, response):
results = []
if response['hits']['hits']:
for result in response['hits']['hits']:
results.append(result["_source"])
return results
def get(self, request):
result_list = []
if request.data:
query = request.data["q"]
else:
query = request.GET.get("q", "")
if query:
results = es.search(
index="auto_suggestions",
body={
"suggest": {
"results": {
"text": query,
"completion": {"field": "name_suggest"},
}
}
},
)
results = results['suggest']['results'][0]['options']
if results:
for result in results:
result_list.append(
{
"value": result["_source"]["name_suggest"],
"key": result["_source"]["desc"],
}
)
return Response(create_response({"result": result_list}))
return Response(create_response({"result": []}))
class SubsAPIView(ListAPIView):
serializer_class = SubsMediaSerializer
permission_classes = (AllowAny,)
pagination_class = PostpageNumberPagination
def get(self, request):
q = self.request.GET.get("q", None)
subs = Subscription.objects.all()
if q:
subs = subs.filter(user__email__icontains=q)
source = SubsMediaSerializer(subs, many=True)
return Response(create_response({"results": source.data}))
class UpdateSubsAPIView(APIView):
serializer_class = SubsMediaSerializer
permission_classes = (AllowAny,)
def get(self, request, pk):
source = SubsMediaSerializer(Subscription.objects.get(id=pk))
return Response(create_response({"results": source.data}))
def post(self, request, *args, **kwargs):
subs_id = self.request.POST.get('id')
subs = Subscription.objects.filter(id=subs_id)
if subs.exists():
subs = subs.first()
subs.subs_type = self.request.POST.get('subs_type')
auto_renew = self.request.POST.get('auto_renew')
if auto_renew == 'No':
subs.auto_renew = False
else:
subs.auto_renew = True
subs.save()
return Response(create_response({"results": "success"}))
return Response(create_response({"results": "error"}))
class UserProfileAPIView(APIView):
permission_classes = (IsAuthenticated, )
def get(self, request, *args, **kwargs):
user = BaseUserProfile.objects.filter(id=self.request.user.id).first()
serializer = UserProfileSerializer(user)
data = serializer.data
response_data = create_response({"user": data})
return Response(response_data)
def put(self, request, format=None):
if request.user.is_authenticated:
if request.data:
_id = request.data["id"]
else:
_id = self.request.POST.get('id')
user = BaseUserProfile.objects.get(id=_id)
serializer = UserProfileSerializer(user, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(create_response({"result":serializer.data, "Msg":"Profile updated successfully."}))
return Response(create_error_response(serializer.errors), status=400)
raise Http404
class AccessSession(APIView):
permission_classes = (AllowAny,)
def get(self, request):
print(request.META.items())
request.session["ip"] = request.META.get('REMOTE_ADDR')
return Response(create_response({"results": request.session._session_key}))
class RSSAPIView(APIView):
permission_classes = (AllowAny,)
def get(self, request):
data = {}
domain = request.GET.get("domain")
if domain:
domain_obj = Domain.objects.filter(domain_id=domain).first()
if domain_obj:
menus = Menu.objects.filter(domain=domain_obj)
for menu in menus:
all_categories = menu.submenu.all()
for category in all_categories:
data[category.name.name] = "/article/rss/?domain=" + domain + "&category=" + category.name.name
return Response(create_response({"results": data}))
return Response(create_error_response({"error": "Domain do not exist."}))
return Response(create_error_response({"error": "Domain is required"}))
import json
import os
import sys
import disnake
from disnake.ext import commands
from disnake.ext.commands import Context
from helpers import json_manager, checks
import logging
if not os.path.isfile("../config.json"):
sys.exit("'config.json' not found by general-normal! Please add it and try again.")
else:
with open("../config.json") as file:
config = json.load(file)
''' Logging '''
logger = logging.getLogger('discord')
logger.setLevel(logging.INFO)
handler = logging.FileHandler(filename='../logs/discord.log', encoding='utf-8',mode='w')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
class Owner(commands.Cog, name="owner-normal"):
def __init__(self, bot):
self.bot = bot
@commands.command(
name="shutdown",
description="Make the bot shutdown.",
)
@checks.is_owner()
async def shutdown(self, context: Context):
"""
Makes the bot shutdown.
"""
embed = disnake.Embed(
description="Shutting down. Bye! :wave:",
color=0x9C84EF
)
logger.info(f"Shutting down. Bye! :wave:")
await context.send(embed=embed)
await self.bot.close()
@commands.command(
name="say",
description="The bot will say anything you want.",
)
@checks.is_owner()
async def say(self, context: Context, *, message: str):
"""
The bot will say anything you want.
"""
logger.info(f"Saying '{message}'")
await context.send(message)
@commands.command(
name="embed",
description="The bot will say anything you want, but within embeds.",
)
@checks.is_owner()
async def embed(self, context: Context, *, message: str):
"""
The bot will say anything you want, but within embeds.
"""
embed = disnake.Embed(
description=message,
color=0x9C84EF
)
logger.info(f"Saying '{message}'")
await context.send(embed=embed)
@commands.group(
name="blacklist"
)
async def blacklist(self, context: Context):
"""
Lets you add or remove a user from the bot's blacklist.
"""
if context.invoked_subcommand is None:
with open("../data/blacklist.json") as file:
blacklist = json.load(file)
embed = disnake.Embed(
title=f"There are currently {len(blacklist['ids'])} blacklisted IDs",
description=f"{', '.join(str(id) for id in blacklist['ids'])}",
color=0x9C84EF
)
await context.send(embed=embed)
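# Assumption inferred from the reads/writes above: ../data/blacklist.json holds
# a single object shaped like {"ids": [123456789, ...]} (IDs hypothetical), and
# helpers.json_manager keeps that file in sync when users are added or removed.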
@blacklist.command(
name="add"
)
async def blacklist_add(self, context: Context, member: disnake.Member = None):
"""
Lets you add a user to the blacklist so they can no longer use the bot.
"""
try:
user_id = member.id
with open("../data/blacklist.json") as file:
blacklist = json.load(file)
if user_id in blacklist['ids']:
embed = disnake.Embed(
title="Error!",
description=f"**{member.name}** is already in the blacklist.",
color=0xE02B2B
)
return await context.send(embed=embed)
json_manager.add_user_to_blacklist(user_id)
embed = disnake.Embed(
title="User Blacklisted",
description=f"**{member.name}** has been successfully added to the blacklist",
color=0x9C84EF
)
with open("../data/blacklist.json") as file:
blacklist = json.load(file)
embed.set_footer(
text=f"There are now {len(blacklist['ids'])} users in the blacklist"
)
logger.info(f"{member.name} has been added to the blacklist.")
await context.send(embed=embed)
except Exception:
embed = disnake.Embed(
title="Error!",
description=f"An unknown error occurred when trying to add **{member.name}** to the blacklist.",
color=0xE02B2B
)
await context.send(embed=embed)
@blacklist.command(
name="remove"
)
async def blacklist_remove(self, context, member: disnake.Member = None):
"""
Lets you remove a user from the blacklist so they can use the bot again.
"""
try:
user_id = member.id
json_manager.remove_user_from_blacklist(user_id)
embed = disnake.Embed(
title="User removed from blacklist",
description=f"**{member.name}** has been successfully removed from the blacklist",
color=0x9C84EF
)
with open("../data/blacklist.json") as file:
blacklist = json.load(file)
embed.set_footer(
text=f"There are now {len(blacklist['ids'])} users in the blacklist"
)
logger.info(f"{member.name} has been removed from the blacklist.")
await context.send(embed=embed)
except Exception:
embed = disnake.Embed(
title="Error!",
description=f"**{member.name}** is not in the blacklist.",
color=0xE02B2B
)
await context.send(embed=embed)
def setup(bot):
bot.add_cog(Owner(bot))
from django.apps import AppConfig
class MarkersConfig(AppConfig):
name = 'markers'
from os import (
startfile,
getcwd
)
from os.path import join, isfile
from io import BytesIO
from csv import (
writer,
excel
)
from openpyxl import (
Workbook,
load_workbook
)
from statistics import (
mean,
variance,
stdev
)
from treetopper.plot import Plot
from treetopper.timber import (
TimberQuick,
TimberFull
)
from treetopper.log import Log
from treetopper.thin import (
ThinTPA,
ThinBA,
ThinRD
)
from treetopper._exceptions import TargetDensityError
from treetopper.fvs import FVS
from treetopper._constants import (
math,
ALL_SPECIES_NAMES,
GRADE_SORT,
LOG_LENGTHS,
SORTED_HEADS
)
from treetopper._utils import (
format_comma,
format_pct,
extension_check,
reorder_dict,
check_date,
add_logs_to_table_heads
)
from treetopper._import_from_sheets import import_from_sheet
from treetopper._print_console import (
print_stand_species,
print_stand_logs,
print_stand_stats
)
from treetopper._print_pdf import PDF
class Stand(object):
"""The Stand Class represents a stand of timber that has had an inventory conducted on it. It should made up of plots (Plot Class)
which contain trees (Timber Classes).
The Stand class will run calculations and statistics of the current stand conditions and it will run calculations of the log
merchantabilty for three metrics: logs per acre, log board feet per acre, and log cubic feet per acre, based on log grades,
log length ranges and species.
"""
def __init__(self, name: str, plot_factor: float, acres: float = None, inventory_date: str = None):
self.name = name.upper()
self.plot_factor = plot_factor
self.plots = []
self.plot_count = 0
self.tpa = 0
self.ba_ac = 0
self.qmd = 0
self.rd_ac = 0
self.bf_ac = 0
self.cf_ac = 0
self.avg_hgt = 0
self.hdr = 0
self.vbar = 0
self.tpa_stats = {}
self.ba_ac_stats = {}
self.rd_ac_stats = {}
self.bf_ac_stats = {}
self.cf_ac_stats = {}
self.species = {}
self.species_gross = {}
self.species_stats = {}
self.logs = {}
self.table_data = []
self.summary_stand = []
self.summary_logs = {}
self.summary_stats = []
self.metrics = ['tpa', 'ba_ac', 'rd_ac', 'bf_ac', 'cf_ac']
self.attrs = ['_gross', '_stats', '']
self.acres = acres
if inventory_date:
self.inv_date = check_date(inventory_date)
else:
self.inv_date = inventory_date
def __getitem__(self, attribute: str):
return self.__dict__[attribute]
def get_stand_table_text(self):
"""Returns a console-formatted string of current stand conditions"""
return print_stand_species(self.summary_stand)
def get_logs_table_text(self):
"""Returns a console-formatted string of stand logs data"""
return print_stand_logs(self.summary_logs)
def get_stats_table_text(self):
"""Returns and console-formatted string of stand stand statistics"""
return print_stand_stats(self.summary_stats)
def get_console_report_text(self):
"""Returns a console-formatted string of the complete stand report"""
return self._compile_report_text()
def console_report(self):
"""Prints a console-formatted string of the complete stand report"""
print(self._compile_report_text())
def get_pdf_report_bytes_io(self):
pdf = self._compile_pdf_report()
return BytesIO(pdf.output(dest='S').encode('latin-1'))
def pdf_report(self, filename: str, directory: str = None, start_file_upon_creation: bool = False):
"""Exports a pdf of the complete stand report to a user specified directory or if directory is None,
to the current working directory. Will open the created pdf report if start_file_upon_creation is True"""
check = extension_check(filename, '.pdf')
if directory:
file = join(directory, check)
else:
file = join(getcwd(), check)
pdf = self._compile_pdf_report()
pdf.output(file, 'F')
if start_file_upon_creation:
startfile(file)
def add_plot(self, plot: Plot):
"""Adds a plot to the stand's plots list and re-runs the calculations and statistics of the stand.
plot argument needs to be the a Plot Class"""
self.plots.append(plot)
self.plot_count += 1
for met in self.metrics:
self._update_metrics(met)
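# Quadratic mean diameter: the basal area of a single tree in square feet is
# 0.005454 * DBH^2 (DBH in inches), so QMD = sqrt((BA/acre / TPA) / 0.005454).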
self.qmd = math.sqrt((self.ba_ac / self.tpa) / .005454)
self.vbar = self.bf_ac / self.ba_ac
self._update_species(plot)
self._update_logs(plot)
self.table_data = self._update_table_data()
self.summary_stand = self._update_summary_stand()
self.summary_logs = self._update_summary_logs()
self.summary_stats = self._update_summary_stats()
def import_sheet_quick(self, file_path: str):
"""Imports tree and plot data from a CSV or XLSX file for a quick cruise and adds that data to the stand"""
plots = import_from_sheet(file_path, self.name, 'q')
for plot_num in plots:
plot = Plot()
for tree in plots[plot_num]:
plot.add_tree(TimberQuick(self.plot_factor, *tree))
self.add_plot(plot)
def import_sheet_full(self, file_path: str):
"""Imports tree and plot data from a CSV or XLSX file for a full cruise and adds that data to the stand"""
plots = import_from_sheet(file_path, self.name, 'f')
for plot_num in plots:
plot = Plot()
for tree_data in plots[plot_num]:
args = tree_data[: -1]
logs = tree_data[-1]
tree = TimberFull(self.plot_factor, *args)
for log in logs:
tree.add_log(*log)
plot.add_tree(tree)
self.add_plot(plot)
def table_to_csv(self, filename: str, directory: str = None):
"""Creates or appends a CSV file with tree data from self.table_data"""
check = extension_check(filename, '.csv')
if directory:
file = join(directory, check)
else:
file = join(getcwd(), check)
if isfile(file):
allow = 'a'
start = 1
else:
allow = 'w'
start = 0
with open(file, allow, newline='') as csv_file:
csv_write = writer(csv_file, dialect=excel)
for i in self.table_data[start:]:
csv_write.writerow(i)
def table_to_excel(self, filename: str, directory: str = None):
"""Creates or appends an Excel file with tree data from self.table_data"""
check = extension_check(filename, '.xlsx')
if directory:
file = join(directory, check)
else:
file = join(getcwd(), check)
if isfile(file):
wb = load_workbook(file)
ws = wb.active
for i in self.table_data[1:]:
ws.append(i)
wb.save(file)
else:
wb = Workbook()
ws = wb.active
for i in self.table_data:
ws.append(i)
wb.save(file)
def _update_metrics(self, metric: str):
"""Updates stand metrics based on the metric entered in the argument, used internally"""
metric_list = [plot[metric] for plot in self.plots]
stats = self._get_stats(metric_list)
setattr(self, metric, stats['mean'])
setattr(self, f'{metric}_stats', stats)
def _update_species(self, plot):
"""Re-runs stand conditions calculations and statistics, used internally"""
update_after = ['qmd', 'vbar', 'avg_hgt', 'hdr']
if self.plot_count == 0:
return
else:
for species in plot.species:
if species not in self.species_gross:
for attr in self.attrs:
if attr == '_gross':
getattr(self, f'species{attr}')[species] = {met: [] for met in self.metrics}
else:
getattr(self, f'species{attr}')[species] = {met: 0 for met in self.metrics}
for key in plot.species[species]:
if key not in update_after:
self.species_gross[species][key].append(plot.species[species][key])
for species in self.species_gross:
for key in self.species_gross[species]:
if key not in update_after:
data = self.species_gross[species][key]
if len(data) < self.plot_count:
data += ([0] * (self.plot_count - len(data)))
stats = self._get_stats(data)
self.species[species][key] = stats['mean']
self.species_stats[species][key] = stats
self.species[species]['qmd'] = math.sqrt((self.species[species]['ba_ac'] / self.species[species]['tpa']) / 0.005454)
self.species[species]['vbar'] = self.species[species]['bf_ac'] / self.species[species]['ba_ac']
if species == 'totals_all':
self.species[species]['avg_hgt'] = mean([p.avg_hgt for p in self.plots])
self.species[species]['hdr'] = mean([p.hdr for p in self.plots])
else:
trees = []
for p in self.plots:
for t in p.trees:
trees.append(t)
self.species[species]['avg_hgt'] = mean([t.height for t in trees if t.species == species])
self.species[species]['hdr'] = mean([t.hdr for t in trees if t.species == species])
def _update_logs(self, plot):
"""Re-runs stand logs calculations, used internally"""
if self.plot_count == 0:
return
else:
subs = ['lpa', 'bf_ac', 'cf_ac']
for species in plot.logs:
if species not in self.logs:
self.logs[species] = {}
for grade in plot.logs[species]:
if grade not in self.logs[species]:
self.logs[species][grade] = {rng: {sub: {'gross': [], 'mean': 0} for sub in subs} for rng in LOG_LENGTHS}
self.logs[species][grade]['totals_by_grade'] = {sub: {'gross': [], 'mean': 0} for sub in subs}
for rng in plot.logs[species][grade]:
if rng != 'display':
for sub in subs:
self.logs[species][grade][rng][sub]['gross'].append(plot.logs[species][grade][rng][sub])
for species in self.logs:
for grade in self.logs[species]:
for rng in self.logs[species][grade]:
for sub in subs:
gross = self.logs[species][grade][rng][sub]['gross']
if len(gross) < self.plot_count:
gross += ([0] * (self.plot_count - len(gross)))
self.logs[species][grade][rng][sub]['mean'] = mean(gross)
def _update_table_data(self):
"""Converts stand data to plot/tree inventory data table layout, used internally"""
heads = ['Stand', 'Plot Number', 'Tree Number', 'Species', 'DBH', 'Height',
'Stump Height', 'Log 1 Length', 'Log 1 Grade', 'Log 1 Defect', 'Between Logs Feet']
master = []
max_logs = []
for i, plot in enumerate(self.plots):
for j, tree in enumerate(plot.trees):
temp = [self.name, i + 1, j + 1]
for key in ['species', 'dbh', 'height']:
temp.append(tree[key])
len_logs = len(tree.logs)
max_logs.append(len_logs)
for k, lnum in enumerate(tree.logs):
log = tree.logs[lnum]
if lnum == 1:
temp.append(log.stem_height - log.length - 1)
for lkey in ['length', 'grade', 'defect']:
temp.append(log[lkey])
if k < len(tree.logs) - 1:
between = tree.logs[lnum+1].stem_height - log.stem_height - tree.logs[lnum+1].length - 1
if between < 0:
temp.append(0)
else:
temp.append(between)
master.append(temp)
heads += add_logs_to_table_heads(max(max_logs))
len_heads = len(heads)
for i in master:
len_i = len(i)
if len_i < len_heads:
i += ['' for j in range(len_heads - len_i)]
master.insert(0, heads)
return master
def _update_summary_stand(self):
"""Updates the current stand conditions list of stand.summary_stand, used internally"""
heads = ['SPECIES'] + [head[1] for head in SORTED_HEADS]
body_data = []
for key in self.species:
if key == 'totals_all':
show = 'TOTALS'
else:
show = key
temp = [str(show)] + [format_comma(self.species[key][i[0]]) for i in SORTED_HEADS]
body_data.append(temp)
body_data.append(body_data.pop(0))
body_data.insert(0, heads)
return body_data
def _update_summary_logs(self):
"""Updates the stand logs summary dict, data-tables are broken down by metric type --> species, used internally.
Example: self.summary_logs['BOARD FEET PER ACRE']['DF'] --> data table"""
table_data = {}
tables = [['bf_ac', 'BOARD FEET PER ACRE'], ['cf_ac', 'CUBIC FEET PER ACRE'], ['lpa', 'LOGS PER ACRE']]
for table in tables:
metric_key = table[0]
key = table[1]
table_data[key] = {}
for species in self.logs:
if species == 'totals_all':
show = 'TOTALS'
else:
show = ALL_SPECIES_NAMES[species]
table_data[key][show] = [['LOG GRADES'] + [rng.upper() for rng in LOG_LENGTHS] + ['TOTALS']]
grade_sort = []
for grade in self.logs[species]:
values = [self.logs[species][grade][rng][metric_key]['mean'] for rng in self.logs[species][grade]]
if sum(values) > 0:
if grade == 'totals_by_length':
col_text = 'TOTALS'
else:
col_text = grade
grade_sort.append([col_text] + [format_comma(z) for z in values])
grade_sort = sorted(grade_sort, key=lambda x: GRADE_SORT[x[0]])
for g in grade_sort:
table_data[key][show].append(g)
table_data[key] = reorder_dict(table_data[key])
return table_data
def _update_summary_stats(self):
"""Updates the stand statistics dict, stats-tables are broken down by species, used internally.
Example: self.summary_stats['DF'] --> stats-table"""
tables = {}
for spp in self.species_stats:
if spp == 'totals_all':
show = 'TOTALS'
else:
show = ALL_SPECIES_NAMES[spp]
tables[show] = [['METRIC'] + [head.upper() for head in self.species_stats[spp]['tpa'] if head != 'low_avg_high'] + ['LOW',
'AVERAGE',
'HIGH']]
for key in self.species_stats[spp]:
temp = [key.upper()]
not_enough_data = False
for sub in self.species_stats[spp][key]:
x = self.species_stats[spp][key][sub]
if not_enough_data:
if x == 'Not enough data':
if sub == 'low_avg_high':
for i in range(3):
temp.append('-')
else:
temp.append('-')
else:
if x == 'Not enough data':
temp.append(x)
not_enough_data = True
else:
if sub == 'low_avg_high':
for i in x:
temp.append(format_comma(i))
elif sub == 'stderr_pct':
temp.append(format_pct(x))
else:
temp.append(format_comma(x))
tables[show].append(temp)
return reorder_dict(tables)
def _get_stats(self, data):
"""Runs the statistical calculations on a set of the stand conditions data, returns an updated sub dict, used internally"""
m = mean(data)
if len(data) >= 2:
std = stdev(data)
ste = std / math.sqrt(self.plot_count)
low_avg_high = [max(round(m - ste, 1), 0), m, m + ste]
d = {'mean': m,
'variance': variance(data),
'stdev': std,
'stderr': ste,
'stderr_pct': (ste / m) * 100,
'low_avg_high': low_avg_high}
else:
d = {'mean': m,
'variance': 'Not enough data',
'stdev': 'Not enough data',
'stderr': 'Not enough data',
'stderr_pct': 'Not enough data',
'low_avg_high': 'Not enough data'}
return d
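# Worked example (hypothetical data): data = [10, 20, 30] with plot_count = 3
# gives mean = 20, stdev = 10, stderr = 10 / sqrt(3) ~ 5.77, stderr_pct ~ 28.9
# and low_avg_high ~ [14.2, 20, 25.8]. Note stderr uses self.plot_count as n,
# which matches len(data) because shorter lists are zero-padded upstream.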
def _compile_report_text(self):
"""Compiles the console-formatted report of all stand data and stats, used internally"""
n = '\n' * 4
console_text = f'{print_stand_species(self.summary_stand)}{n}'
console_text += f'{print_stand_logs(self.summary_logs)}{n}'
console_text += f'{print_stand_stats(self.summary_stats)}'
return console_text
def _compile_pdf_report(self):
pdf = PDF()
pdf.alias_nb_pages()
pdf.add_page()
pdf.compile_stand_report(self)
return pdf
if __name__ == '__main__':
import argparse
import traceback
import sys
from os import mkdir, getcwd
from os.path import join, isfile, isdir, expanduser
from treetopper._utils import get_desktop_path
def make_dir_and_subdir(workflow_num):
desktop = get_desktop_path()
tt_dir = join(desktop, 'treetopper_outputs')
if not isdir(tt_dir):
mkdir(tt_dir)
wf_dir = join(tt_dir, f'workflow_{workflow_num}')
if not isdir(wf_dir):
mkdir(wf_dir)
return wf_dir
def get_package_path(filename):
path = None
for i in sys.path:
if 'AppData' in i and i[-13:] == 'site-packages':
path = i
break
tt_path = join(path, 'treetopper')
sheet_path = join(tt_path, 'example_csv_and_xlsx')
final = join(sheet_path, filename)
return final
parser = argparse.ArgumentParser(description='treetopper Example Workflows')
parser.add_argument('workflow_number', help='Enter the number of the workflow to run.\n Valid workflow numbers: 1, 2, 3, 4, 5, 6)')
args = parser.parse_args()
wf = args.workflow_number
while True:
if wf not in ['1', '2', '3', '4', '5', '6']:
print('Please enter a workflow number 1, 2, 3, 4, 5, or 6')
wf = input('Workflow #: ')
else:
break
wf = int(wf)
def workflow_1(workflow_number):
stand = Stand('WF1', -20)
plot_factor = stand.plot_factor
tree_data = [
# Plot 1
[TimberQuick(plot_factor, 'DF', 29.5, 119), TimberQuick(plot_factor, 'WH', 18.9, 102),
TimberQuick(plot_factor, 'WH', 20.2, 101), TimberQuick(plot_factor, 'WH', 19.9, 100),
TimberQuick(plot_factor, 'DF', 20.6, 112)],
# Plot 2
[TimberQuick(plot_factor, 'DF', 25.0, 117), TimberQuick(plot_factor, 'DF', 14.3, 105),
TimberQuick(plot_factor, 'DF', 20.4, 119), TimberQuick(plot_factor, 'DF', 16.0, 108),
TimberQuick(plot_factor, 'RC', 20.2, 124), TimberQuick(plot_factor, 'RC', 19.5, 116),
TimberQuick(plot_factor, 'RC', 23.4, 121), TimberQuick(plot_factor, 'DF', 17.8, 116),
TimberQuick(plot_factor, 'DF', 22.3, 125)]
]
for trees in tree_data:
plot = Plot()
for tree in trees:
plot.add_tree(tree)
stand.add_plot(plot)
path = make_dir_and_subdir(workflow_number)
stand.console_report()
stand.table_to_csv(join(path, 'example_csv_export.csv'))
thin80tpa = ThinTPA(stand, 80)
thin80tpa.console_report()
end_message = """**WORKFLOW 1 created a QUICK CRUISE stand from manually entered tree data.
It then ran a thinning scenario with a target density of 80 Trees per Acre considering all species and diameter ranges.
Outputs:
Stand console report in terminal [print(stand_class.console_report)] ^above^
Thinning console report in terminal [print(thin_class.console_report))] ^above^
Plot data .csv "example_csv_export.csv" in desktop/treetopper_outputs/workflow_1/
"""
print(f'\n\n{end_message}')
def workflow_2(workflow_number):
stand = Stand('WF2', 33.3)
plot_factor = stand.plot_factor
tree_data = [
# Plot 1
[[TimberFull(plot_factor, 'DF', 29.5, 119), [[42, 40, 'S2', 5], [83, 40, 'S3', 0], [102, 18, 'S4', 10]]],
[TimberFull(plot_factor, 'WH', 18.9, 102), [[42, 40, 'S2', 0], [79, 36, 'S4', 5]]],
[TimberFull(plot_factor, 'WH', 20.2, 101), [[42, 40, 'S2', 5], [83, 40, 'S4', 0]]],
[TimberFull(plot_factor, 'WH', 19.9, 100), [[42, 40, 'S2', 0], [83, 40, 'S4', 15]]],
[TimberFull(plot_factor, 'DF', 20.6, 112), [[42, 40, 'S2', 0], [83, 40, 'S3', 5], [100, 16, 'UT', 10]]]],
# Plot 2
[[TimberFull(plot_factor, 'DF', 25.0, 117), [[42, 40, 'SM', 0], [83, 40, 'S3', 5], [100, 16, 'S4', 0]]],
[TimberFull(plot_factor, 'DF', 14.3, 105), [[42, 40, 'S3', 0], [79, 36, 'S4', 0]]],
[TimberFull(plot_factor, 'DF', 20.4, 119), [[42, 40, 'S2', 5], [83, 40, 'S3', 5], [100, 16, 'S4', 5]]],
[TimberFull(plot_factor, 'DF', 16.0, 108), [[42, 40, 'S3', 5], [83, 40, 'S3', 10]]],
[TimberFull(plot_factor, 'RC', 20.2, 124), [[42, 40, 'CR', 5], [83, 40, 'CR', 5], [104, 20, 'CR', 5]]],
[TimberFull(plot_factor, 'RC', 19.5, 116), [[42, 40, 'CR', 10], [83, 40, 'CR', 5], [100, 16, 'CR', 0]]],
[TimberFull(plot_factor, 'RC', 23.4, 121), [[42, 40, 'CR', 0], [83, 40, 'CR', 0], [106, 22, 'CR', 5]]],
[TimberFull(plot_factor, 'DF', 17.8, 116), [[42, 40, 'S2', 0], [83, 40, 'S3', 0], [100, 16, 'S4', 10]]],
[TimberFull(plot_factor, 'DF', 22.3, 125), [[42, 40, 'SM', 0], [83, 40, 'S3', 5], [108, 24, 'S4', 0]]]]
]
for trees in tree_data:
plot = Plot()
for tree, logs in trees:
for log in logs:
tree.add_log(*log)
plot.add_tree(tree)
stand.add_plot(plot)
path = make_dir_and_subdir(workflow_number)
stand.console_report()
stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
thin120ba = ThinBA(stand, 120, species_to_cut=['DF', 'WH'])
thin120ba.console_report()
end_message = """**WORKFLOW 2 created a FULL CRUISE stand from manually entered tree data.
It then ran a thinning scenario with a target density of 120 Basal Area per Acre harvesting only DF and WH considering all diameter ranges.
Outputs:
Stand console report in terminal [print(stand_class.console_report)] ^above^
Thinning console report in terminal [print(thin_class.console_report))] ^above^
Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_2/
"""
print(f'\n\n{end_message}')
def workflow_3(workflow_number):
path = make_dir_and_subdir(workflow_number)
stand = Stand('EX4', -30)
stand.import_sheet_quick(get_package_path('Example_Excel_quick.xlsx'))
stand.console_report()
stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
thin25rd = ThinRD(stand, 25, species_to_cut=['DF', 'WH'], min_dbh_to_cut=10, max_dbh_to_cut=18)
thin25rd.console_report()
end_message = """**WORKFLOW 3 created a QUICK CRUISE stand from importing plot data from an excel sheet.
It then ran a thinning scenario with a target density of 25 Relative Density per Acre harvesting only DF and WH, with a
minimum dbh of 10 inches and a maximum dbh of 18 inches. ** Note this thinning density won't be able to be achieved
fully because our parameters don't allow for the needed harvest density, but this is to illustrate that the thinning
will let the user know how much density was taken and how much more is needed to achieve the desired density target
Outputs:
Stand console report in terminal [print(stand_class.console_report)] ^above^
Thinning console report in terminal [print(thin_class.console_report))] ^above^
Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_3/
"""
print(f'\n\n{end_message}')
def workflow_4(workflow_number):
path = make_dir_and_subdir(workflow_number)
stand = Stand('OK2', 46.94)
stand.import_sheet_full(get_package_path('Example_CSV_full.csv'))
stand.console_report()
stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
try:
thin100tpa = ThinTPA(stand, 100)
thin100tpa.console_report()
except TargetDensityError as e:
print(traceback.format_exc())
    end_message = """**WORKFLOW 4 created a FULL CRUISE stand from importing plot data from a csv sheet.
It then ran a thinning scenario with a target density of 100 Trees per Acre considering all species and diameter ranges.
** Note: this thinning density is greater than the current stand density, so the Thin Class will throw a TargetDensityError exception
which will explain what went wrong.
Outputs:
Stand console report in terminal [print(stand_class.console_report)] ^above^
Thinning console report in terminal [print(thin_class.console_report)] ^above^
Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_4/
"""
print(f'\n\n{end_message}')
def workflow_5(workflow_number):
path = make_dir_and_subdir(workflow_number)
stand = Stand('EX3', 33.3)
stand.import_sheet_quick(get_package_path('Example_CSV_quick.csv'))
stand.pdf_report(join(path, 'stand_report.pdf'))
stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
thin140ba = ThinBA(stand, 140, species_to_cut=['DF', 'WH', 'RA'], max_dbh_to_cut=24)
thin140ba.pdf_report(join(path, 'thin_report.pdf'))
    end_message = """**WORKFLOW 5 created a QUICK CRUISE stand from importing plot data from a csv sheet.
It then ran a thinning scenario with a target density of 140 Basal Area per Acre harvesting only DF, WH and RA with a maximum diameter of 24 inches.
Outputs:
Stand PDF report "stand_report.pdf" from [stand_class.pdf_report()] in desktop/treetopper_outputs/workflow_5/
Thinning PDF report "thin_report.pdf" from [thin_class.pdf_report()] in desktop/treetopper_outputs/workflow_5/
Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_5/
"""
print(f'\n\n{end_message}')
def workflow_6(workflow_number):
path = make_dir_and_subdir(workflow_number)
stand = Stand('OK1', -30)
stand.import_sheet_full(get_package_path('Example_Excel_full.xlsx'))
stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
fvs = FVS()
fvs.set_stand(stand, 'PN', 612, 6, 45, 'DF', 110)
fvs.access_db('access_db', directory=path)
fvs.sqlite_db('sqlite_db', directory=path)
fvs.excel_db('excel_db', directory=path)
end_message = """**WORKFLOW 6 created a FULL CRUISE stand from importing plot data from an excel sheet.
It then ran the FVS module to create FVS formatted databases from the stand data. FVS is the US Forest Service's Forest Vegetation Simulator.
Outputs:
FVS Access database "access_db.db" from [fvs_class.access_db()] in desktop/treetopper_outputs/workflow_6/
FVS Suppose file "Suppose.loc" in desktop/treetopper_outputs/workflow_6/. ** FVS Legacy needs a .loc file along with the database.
FVS SQLite database "sqlite_db.db" from [fvs_class.sqlite_db()] in desktop/treetopper_outputs/workflow_6/
FVS Excel database "excel_db.db" from [fvs_class.excel_db()] in desktop/treetopper_outputs/workflow_6/
Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_6/
"""
print(f'\n\n{end_message}')
def main(workflow_number):
opts = {
1: workflow_1,
2: workflow_2,
3: workflow_3,
4: workflow_4,
5: workflow_5,
6: workflow_6
}
opts[workflow_number](workflow_number)
print(f"\n\n{'-' * 200}\n\n")
main(wf)
print(f"\n\n{'-' * 200}\n\n")
| [
[
[
21,
30
],
[
4287,
4296
]
],
[
[
36,
42
],
[
4149,
4155
],
[
6367,
6373
],
[
7018,
7024
]
],
[
[
65,
69
],
[
4088,
4092
],
[
4144,
4148
],
[
6306,
6310
],
[
6362,
6366
],
[
6957,
6961
],
[
7013,
7017
]
],
[
[
85,
92
],
[
3621,
3628
]
],
[
[
115,
121
],
[
6597,
6603
]
],
[
[
127,
132
],
[
6622,
6627
]
],
[
[
162,
170
],
[
7253,
7261
]
],
[
[
176,
189
],
[
7078,
7091
]
],
[
[
221,
225
],
[
9457,
9461
],
[
9546,
9550
],
[
9815,
9819
],
[
9922,
9926
],
[
11416,
11420
],
[
17441,
17445
]
],
[
[
231,
239
],
[
17684,
17692
]
],
[
[
245,
250
],
[
17497,
17502
]
],
[
[
281,
285
],
[
4333,
4337
],
[
5316,
5320
],
[
5740,
5744
],
[
21186,
21190
],
[
23992,
23996
]
],
[
[
322,
333
],
[
5394,
5405
],
[
20323,
20334
],
[
20366,
20377
],
[
20431,
20442
],
[
20474,
20485
],
[
20539,
20550
],
[
20636,
20647
],
[
20679,
20690
],
[
20744,
20755
],
[
20787,
20798
],
[
20852,
20863
],
[
20895,
20906
],
[
20960,
20971
],
[
21003,
21014
],
[
21068,
21079
]
],
[
[
339,
349
],
[
5892,
5902
],
[
22240,
22250
],
[
22367,
22377
],
[
22473,
22483
],
[
22579,
22589
],
[
22686,
22696
],
[
22844,
22854
],
[
22970,
22980
],
[
23076,
23086
],
[
23202,
23212
],
[
23309,
23319
],
[
23435,
23445
],
[
23562,
23572
],
[
23688,
23698
],
[
23815,
23825
]
],
[
[
379,
382
]
],
[
[
417,
424
],
[
21464,
21471
],
[
26680,
26687
]
],
[
[
430,
436
],
[
24352,
24358
],
[
27893,
27899
]
],
[
[
442,
448
],
[
25316,
25322
]
],
[
[
486,
504
],
[
26755,
26773
]
],
[
[
532,
535
],
[
28954,
28957
]
],
[
[
576,
580
],
[
4656,
4660
],
[
9160,
9164
],
[
17533,
17537
]
],
[
[
586,
603
],
[
14333,
14350
],
[
15607,
15624
]
],
[
[
609,
619
],
[
15042,
15052
]
],
[
[
625,
636
],
[
10533,
10544
],
[
14443,
14454
]
],
[
[
642,
654
],
[
13251,
13263
],
[
13515,
13527
]
],
[
[
693,
705
],
[
13468,
13480
],
[
14946,
14958
],
[
16961,
16973
],
[
17169,
17181
]
],
[
[
711,
721
],
[
17076,
17086
]
],
[
[
727,
742
],
[
4013,
4028
],
[
6231,
6246
],
[
6881,
6896
]
],
[
[
748,
760
],
[
15180,
15192
],
[
17243,
17255
]
],
[
[
766,
776
],
[
2554,
2564
]
],
[
[
782,
805
],
[
12796,
12819
]
],
[
[
851,
868
],
[
5221,
5238
],
[
5645,
5662
]
],
[
[
913,
932
],
[
2851,
2870
],
[
18339,
18358
]
],
[
[
938,
954
],
[
3010,
3026
],
[
18411,
18427
]
],
[
[
960,
977
],
[
3175,
3192
],
[
18479,
18496
]
],
[
[
1014,
1017
],
[
18597,
18600
]
],
[
[
1026,
1031
],
[
20191,
20196
],
[
22106,
22111
],
[
25098,
25103
],
[
26447,
26452
],
[
27650,
27655
],
[
28774,
28779
]
],
[
[
18754,
18762
],
[
19641,
19649
]
],
[
[
18774,
18783
],
[
26798,
26807
]
],
[
[
18795,
18798
],
[
19341,
19344
]
],
[
[
18818,
18823
],
[
19116,
19121
],
[
19231,
19236
]
],
[
[
18825,
18831
],
[
4149,
4155
],
[
6367,
6373
],
[
7018,
7024
]
],
[
[
18856,
18860
],
[
4088,
4092
],
[
4144,
4148
],
[
6306,
6310
],
[
6362,
6366
],
[
6957,
6961
],
[
7013,
7017
],
[
19038,
19042
],
[
19148,
19152
],
[
19478,
19482
],
[
19524,
19528
],
[
19578,
19582
],
[
21405,
21409
],
[
24291,
24295
],
[
25256,
25260
],
[
26601,
26605
],
[
27771,
27775
],
[
27832,
27836
],
[
27995,
27999
],
[
28899,
28903
]
],
[
[
18862,
18868
],
[
6396,
6402
],
[
7047,
7053
]
],
[
[
18870,
18875
],
[
19089,
19094
],
[
19204,
19209
]
],
[
[
18877,
18887
]
],
[
[
18922,
18938
],
[
19001,
19017
]
],
[
[
18948,
18967
],
[
21309,
21328
],
[
24193,
24212
],
[
25044,
25063
],
[
26393,
26412
],
[
27596,
27615
],
[
28720,
28739
]
],
[
[
19276,
19292
],
[
25149,
25165
],
[
26499,
26515
],
[
27702,
27718
],
[
28824,
28840
]
],
[
[
19632,
19638
],
[
19713,
19719
],
[
19856,
19862
]
],
[
[
19849,
19853
],
[
19886,
19890
]
],
[
[
19881,
19883
],
[
19934,
19936
],
[
20132,
20134
]
],
[
[
20060,
20062
],
[
19934,
19936
],
[
20132,
20134
]
],
[
[
20123,
20125
],
[
30354,
30356
]
],
[
[
20146,
20156
],
[
30106,
30116
]
],
[
[
22061,
22071
],
[
30133,
30143
]
],
[
[
25000,
25010
],
[
30160,
30170
]
],
[
[
26349,
26359
],
[
30187,
30197
]
],
[
[
27552,
27562
],
[
30214,
30224
]
],
[
[
28676,
28686
],
[
30241,
30251
]
],
[
[
30051,
30055
],
[
30349,
30353
]
]
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def example1():
"""Slice operator.
seq[::stride] # [seq[0], seq[stride], ..., seq[-1] ]
seq[low::stride] # [seq[low], seq[low+stride], ..., seq[-1] ]
seq[:high:stride] # [seq[0], seq[stride], ..., seq[high-1]]
seq[low:high:stride] # [seq[low], seq[low+stride], ..., seq[high-1]]
"""
l = list("01234567")
    assert l[::2] == list("0246") # start at index 0, take every 2nd element
    assert l[1::2] == list("1357") # start at index 1, take every 2nd element
    assert l[:4:2] == list("02") # from the start up to index 4 (exclusive), step 2
    assert l[2:6:2] == list("24") # from index 2 up to index 6 (exclusive), step 2
example1()
def example2():
"""Reversed slice operator
"""
l = list("01234567")
    assert l[::-1] == list("76543210") # start from the last element, reversed
    assert l[::-2] == list("7531") # start from the last element, step 2 backwards
    assert l[-2::-2] == list("6420") # start from index -2, step 2 backwards
    assert l[:3:-2] == list("75") # from the end down to index 3 (exclusive), step 2 backwards
example2()
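# A minimal equivalence sketch (added for illustration, assuming CPython
# semantics): an extended slice matches indexing with range(start, stop, stride).
def example3():
    l = list("01234567")
    assert l[1::2] == [l[i] for i in range(1, len(l), 2)]
    assert l[::-1] == [l[i] for i in range(len(l) - 1, -1, -1)]
example3()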
| [
[
[
52,
60
],
[
670,
678
]
],
[
[
687,
695
],
[
981,
989
]
]
] |
"""
Common logic used by the docker state and execution module
This module contains logic to accommodate docker/salt CLI usage, as well as
input formatted by states.
"""
import copy
import logging
import salt.utils.args
import salt.utils.data
import salt.utils.dockermod.translate
from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt.utils.args import get_function_argspec as _argspec
from salt.utils.dockermod.translate.helpers import split as _split
try:
import docker
except ImportError:
docker = None
# These next two imports are only necessary to have access to the needed
# functions so that we can get argspecs for the container config, host config,
# and networking config (see the get_client_args() function).
try:
import docker.types
except ImportError:
pass
try:
import docker.utils
except ImportError:
pass
NOTSET = object()
__virtualname__ = "docker"
# Default timeout as of docker-py 1.0.0
CLIENT_TIMEOUT = 60
# Timeout for stopping the container, before a kill is invoked
SHUTDOWN_TIMEOUT = 10
log = logging.getLogger(__name__)
def __virtual__():
if docker is None:
return False
return __virtualname__
def get_client_args(limit=None):
if docker is None:
raise CommandExecutionError("docker Python module not imported")
limit = salt.utils.args.split_input(limit or [])
ret = {}
if not limit or any(
x in limit
for x in ("create_container", "host_config", "connect_container_to_network")
):
try:
ret["create_container"] = _argspec(docker.APIClient.create_container).args
except AttributeError:
try:
ret["create_container"] = _argspec(docker.Client.create_container).args
except AttributeError:
raise CommandExecutionError("Coult not get create_container argspec")
try:
ret["host_config"] = _argspec(docker.types.HostConfig.__init__).args
except AttributeError:
try:
ret["host_config"] = _argspec(docker.utils.create_host_config).args
except AttributeError:
raise CommandExecutionError("Could not get create_host_config argspec")
try:
ret["connect_container_to_network"] = _argspec(
docker.types.EndpointConfig.__init__
).args
except AttributeError:
try:
ret["connect_container_to_network"] = _argspec(
docker.utils.utils.create_endpoint_config
).args
except AttributeError:
try:
ret["connect_container_to_network"] = _argspec(
docker.utils.create_endpoint_config
).args
except AttributeError:
raise CommandExecutionError(
"Could not get connect_container_to_network argspec"
)
for key, wrapped_func in (
("logs", docker.api.container.ContainerApiMixin.logs),
("create_network", docker.api.network.NetworkApiMixin.create_network),
):
if not limit or key in limit:
try:
func_ref = wrapped_func
try:
# functools.wraps makes things a little easier in Python 3
ret[key] = _argspec(func_ref.__wrapped__).args
except AttributeError:
# functools.wraps changed (unlikely), bail out
ret[key] = []
except AttributeError:
# Function moved, bail out
ret[key] = []
if not limit or "ipam_config" in limit:
try:
ret["ipam_config"] = _argspec(docker.types.IPAMPool.__init__).args
except AttributeError:
try:
ret["ipam_config"] = _argspec(docker.utils.create_ipam_pool).args
except AttributeError:
raise CommandExecutionError("Could not get ipam args")
for item in ret:
# The API version is passed automagically by the API code that imports
# these classes/functions and is not an arg that we will be passing, so
# remove it if present. Similarly, don't include "self" if it shows up
# in the arglist.
for argname in ("version", "self"):
try:
ret[item].remove(argname)
except ValueError:
pass
# Remove any args in host or endpoint config from the create_container
# arglist. This keeps us from accidentally allowing args that docker-py has
# moved from the create_container function to the either the host or
# endpoint config.
for item in ("host_config", "connect_container_to_network"):
for val in ret.get(item, []):
try:
ret["create_container"].remove(val)
except ValueError:
# Arg is not in create_container arglist
pass
for item in ("create_container", "host_config", "connect_container_to_network"):
if limit and item not in limit:
ret.pop(item, None)
try:
ret["logs"].remove("container")
except (KeyError, ValueError, TypeError):
pass
return ret
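# Hypothetical illustration (not part of this module); the exact names in the
# returned lists depend on the installed docker-py version:
#
#   get_client_args(limit=["create_container"])
#   # -> {"create_container": ["image", "command", ...]}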
def translate_input(
translator,
skip_translate=None,
ignore_collisions=False,
validate_ip_addrs=True,
**kwargs
):
"""
Translate CLI/SLS input into the format the API expects. The ``translator``
argument must be a module containing translation functions, within
salt.utils.dockermod.translate. A ``skip_translate`` kwarg can be passed to
control which arguments are translated. It can be either a comma-separated
    list or an iterable containing strings (e.g. a list or tuple); members
    of that iterable will have their translation skipped. Optionally,
skip_translate can be set to True to skip *all* translation.
"""
kwargs = copy.deepcopy(salt.utils.args.clean_kwargs(**kwargs))
invalid = {}
collisions = []
if skip_translate is True:
# Skip all translation
return kwargs
else:
if not skip_translate:
skip_translate = ()
else:
try:
skip_translate = _split(skip_translate)
except AttributeError:
pass
if not hasattr(skip_translate, "__iter__"):
log.error("skip_translate is not an iterable, ignoring")
skip_translate = ()
try:
# Using list(kwargs) here because if there are any invalid arguments we
# will be popping them from the kwargs.
for key in list(kwargs):
real_key = translator.ALIASES.get(key, key)
if real_key in skip_translate:
continue
# ipam_pools is designed to be passed as a list of actual
# dictionaries, but if each of the dictionaries passed has a single
# element, it will be incorrectly repacked.
if key != "ipam_pools" and salt.utils.data.is_dictlist(kwargs[key]):
kwargs[key] = salt.utils.data.repack_dictlist(kwargs[key])
try:
kwargs[key] = getattr(translator, real_key)(
kwargs[key],
validate_ip_addrs=validate_ip_addrs,
skip_translate=skip_translate,
)
except AttributeError:
log.debug("No translation function for argument '%s'", key)
continue
except SaltInvocationError as exc:
kwargs.pop(key)
invalid[key] = exc.strerror
try:
translator._merge_keys(kwargs)
except AttributeError:
pass
# Convert CLI versions of commands to their docker-py counterparts
for key in translator.ALIASES:
if key in kwargs:
new_key = translator.ALIASES[key]
value = kwargs.pop(key)
if new_key in kwargs:
collisions.append(new_key)
else:
kwargs[new_key] = value
try:
translator._post_processing(kwargs, skip_translate, invalid)
except AttributeError:
pass
except Exception as exc: # pylint: disable=broad-except
error_message = exc.__str__()
log.error("Error translating input: '%s'", error_message, exc_info=True)
else:
error_message = None
error_data = {}
if error_message is not None:
error_data["error_message"] = error_message
if invalid:
error_data["invalid"] = invalid
if collisions and not ignore_collisions:
for item in collisions:
error_data.setdefault("collisions", []).append(
"'{}' is an alias for '{}', they cannot both be used".format(
translator.ALIASES_REVMAP[item], item
)
)
if error_data:
raise CommandExecutionError("Failed to translate input", info=error_data)
return kwargs
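# Hypothetical illustration (not salt code): both forms below are accepted for
# skip_translate and leave "environment" untranslated, per the docstring above:
#
#   translate_input(translator_mod, skip_translate="environment,ports")
#   translate_input(translator_mod, skip_translate=("environment", "ports"))
#
# where translator_mod would be a module such as
# salt.utils.dockermod.translate.container.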
def create_ipam_config(*pools, **kwargs):
"""
Builds an IP address management (IPAM) config dictionary
"""
kwargs = salt.utils.args.clean_kwargs(**kwargs)
try:
# docker-py 2.0 and newer
pool_args = salt.utils.args.get_function_argspec(
docker.types.IPAMPool.__init__
).args
create_pool = docker.types.IPAMPool
create_config = docker.types.IPAMConfig
except AttributeError:
# docker-py < 2.0
pool_args = salt.utils.args.get_function_argspec(
docker.utils.create_ipam_pool
).args
create_pool = docker.utils.create_ipam_pool
create_config = docker.utils.create_ipam_config
for primary_key, alias_key in (("driver", "ipam_driver"), ("options", "ipam_opts")):
if alias_key in kwargs:
alias_val = kwargs.pop(alias_key)
if primary_key in kwargs:
log.warning(
"docker.create_ipam_config: Both '%s' and '%s' "
"passed. Ignoring '%s'",
alias_key,
primary_key,
alias_key,
)
else:
kwargs[primary_key] = alias_val
if salt.utils.data.is_dictlist(kwargs.get("options")):
kwargs["options"] = salt.utils.data.repack_dictlist(kwargs["options"])
# Get all of the IPAM pool args that were passed as individual kwargs
# instead of in the *pools tuple
pool_kwargs = {}
for key in list(kwargs):
if key in pool_args:
pool_kwargs[key] = kwargs.pop(key)
pool_configs = []
if pool_kwargs:
pool_configs.append(create_pool(**pool_kwargs))
pool_configs.extend([create_pool(**pool) for pool in pools])
if pool_configs:
# Sanity check the IPAM pools. docker-py's type/function for creating
# an IPAM pool will allow you to create a pool with a gateway, IP
# range, or map of aux addresses, even when no subnet is passed.
# However, attempting to use this IPAM pool when creating the network
# will cause the Docker Engine to throw an error.
if any("Subnet" not in pool for pool in pool_configs):
raise SaltInvocationError("A subnet is required in each IPAM pool")
else:
kwargs["pool_configs"] = pool_configs
ret = create_config(**kwargs)
pool_dicts = ret.get("Config")
if pool_dicts:
# When you inspect a network with custom IPAM configuration, only
        # arguments which were explicitly passed are reflected. By contrast,
# docker-py will include keys for arguments which were not passed in
# but set the value to None. Thus, for ease of comparison, the below
# loop will remove all keys with a value of None from the generated
# pool configs.
for idx, _ in enumerate(pool_dicts):
for key in list(pool_dicts[idx]):
if pool_dicts[idx][key] is None:
del pool_dicts[idx][key]
return ret
| [
[
[
183,
187
],
[
5991,
5995
]
],
[
[
195,
202
],
[
1080,
1087
]
],
[
[
211,
226
]
],
[
[
234,
249
]
],
[
[
257,
287
],
[
1344,
1348
],
[
6005,
6009
],
[
7089,
7093
],
[
7161,
7165
],
[
9271,
9275
],
[
9374,
9378
],
[
9635,
9639
],
[
10375,
10379
],
[
10455,
10459
]
],
[
[
316,
337
],
[
1272,
1293
],
[
1828,
1849
],
[
2176,
2197
],
[
2861,
2882
],
[
3998,
4019
],
[
9050,
9071
]
],
[
[
339,
358
],
[
7599,
7618
],
[
11372,
11391
]
],
[
[
387,
419
],
[
1586,
1594
],
[
1725,
1733
],
[
1939,
1947
],
[
2072,
2080
],
[
2306,
2314
],
[
2490,
2498
],
[
2699,
2707
],
[
3390,
3398
],
[
3765,
3773
],
[
3896,
3904
]
],
[
[
471,
486
],
[
6304,
6310
]
],
[
[
504,
510
],
[
1136,
1142
],
[
1242,
1248
],
[
1595,
1601
],
[
1734,
1740
],
[
1948,
1954
],
[
2081,
2087
],
[
2332,
2338
],
[
2520,
2526
],
[
2733,
2739
],
[
3032,
3038
],
[
3105,
3111
],
[
3774,
3780
],
[
3905,
3911
],
[
9424,
9430
],
[
9492,
9498
],
[
9538,
9544
],
[
9685,
9691
],
[
9752,
9758
],
[
9806,
9812
]
],
[
[
535,
541
],
[
1136,
1142
],
[
1242,
1248
],
[
1595,
1601
],
[
1734,
1740
],
[
1948,
1954
],
[
2081,
2087
],
[
2332,
2338
],
[
2520,
2526
],
[
2733,
2739
],
[
3032,
3038
],
[
3105,
3111
],
[
3774,
3780
],
[
3905,
3911
],
[
9424,
9430
],
[
9492,
9498
],
[
9538,
9544
],
[
9685,
9691
],
[
9752,
9758
],
[
9806,
9812
]
],
[
[
780,
792
],
[
1136,
1142
],
[
1242,
1248
],
[
1595,
1601
],
[
1734,
1740
],
[
1948,
1954
],
[
2081,
2087
],
[
2332,
2338
],
[
2520,
2526
],
[
2733,
2739
],
[
3032,
3038
],
[
3105,
3111
],
[
3774,
3780
],
[
3905,
3911
],
[
9424,
9430
],
[
9492,
9498
],
[
9538,
9544
],
[
9685,
9691
],
[
9752,
9758
],
[
9806,
9812
]
],
[
[
838,
850
],
[
1136,
1142
],
[
1242,
1248
],
[
1595,
1601
],
[
1734,
1740
],
[
1948,
1954
],
[
2081,
2087
],
[
2332,
2338
],
[
2520,
2526
],
[
2733,
2739
],
[
3032,
3038
],
[
3105,
3111
],
[
3774,
3780
],
[
3905,
3911
],
[
9424,
9430
],
[
9492,
9498
],
[
9538,
9544
],
[
9685,
9691
],
[
9752,
9758
],
[
9806,
9812
]
],
[
[
881,
887
]
],
[
[
900,
915
],
[
1184,
1199
]
],
[
[
968,
982
]
],
[
[
1051,
1067
]
],
[
[
1074,
1077
],
[
6455,
6458
],
[
7495,
7498
],
[
8437,
8440
],
[
10061,
10064
]
],
[
[
1114,
1125
]
],
[
[
1206,
1221
]
],
[
[
5310,
5325
]
],
[
[
9143,
9161
]
]
] |
# Copyright (c) 2019 UniMoRe, Matteo Spallanzani
import torch
from ..utils.utils import xywh2xyxy, bbox_iou
def clip_boxes(boxes):
boxes[:, [0, 2]] = boxes[:, [0, 2]].clamp(min=0, max=1)
boxes[:, [1, 3]] = boxes[:, [1, 3]].clamp(min=0, max=1)
def postprocess_pr(pr_outs, conf_thres=0.001, overlap_thres=0.5):
"""Restructure YOLOv3Tiny tensors into lists, then filter out non-maximal
(redundant) annotations from the predictions."""
# pr_outs = [[bs, grid_positions, 85], [bs, 4*grid_positions, 85]]
    # when its two components are concatenated, we get a tensor [bs, 5*grid_positions, 85], whose `bs` "slices"
    # have to be "stripped" to remove redundant components
# strip each slice (corresponding to a single image in the batch) to get sequences of (possibly) different lengths:
# the natural data structure to use to collect these sequences is a list
pr_outs = [p.view(p.size(0), -1, p.size(-1)) for p in pr_outs]
pr_outs = torch.cat(pr_outs, 1).detach().cpu()
pr_labels = [None] * len(pr_outs)
for img_id, pr in enumerate(pr_outs):
# filter out irrelevant predictions
pr_cls_prob, pr_cls_id = pr[:, 5:].max(1)
pr[:, 4] *= pr_cls_prob
i = (pr[:, 4] > conf_thres) & torch.isfinite(pr).all(1)
pr = pr[i]
if len(pr) == 0:
continue
pr_cls_prob = pr_cls_prob[i]
pr_cls_id = pr_cls_id[i].unsqueeze(1).float()
pr[:, :4] = xywh2xyxy(pr[:, :4])
pr = torch.cat((pr[:, :5], pr_cls_prob.unsqueeze(1), pr_cls_id), 1)
pr = pr[(-pr[:, 4]).argsort()]
detections = []
for c in pr[:, -1].unique():
pr_anno_c = pr[pr[:, -1] == c]
n = len(pr_anno_c)
if n == 1:
detections.append(pr_anno_c)
continue
elif n > 100:
pr_anno_c = pr_anno_c[:100]
while len(pr_anno_c) > 0:
if len(pr_anno_c) == 1:
detections.append(pr_anno_c)
break
redundant = bbox_iou(pr_anno_c[0], pr_anno_c) > overlap_thres
weights = pr_anno_c[redundant, 4:5]
pr_anno_c[0, :4] = (weights * pr_anno_c[redundant, 0:4]).sum(0) / weights.sum()
detections.append(pr_anno_c[0:1]) # keep leading dimension 1 for 1D tensor
pr_anno_c = pr_anno_c[~redundant]
if len(detections) > 0:
detections = torch.cat(detections)
clip_boxes(detections[:, :4])
pr_labels[img_id] = detections[(-detections[:, 4]).argsort()]
return pr_labels
def postprocess_gt(gt_labels):
gt_labels = gt_labels.detach().cpu()
bs = gt_labels[0, 0].to(torch.int)
gt_labels = [gt_labels[gt_labels[:, 1] == i, 2:] for i in range(bs)]
return gt_labels
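# Illustrative shapes (added as a sketch; the exact grid sizes are assumptions):
# with two YOLOv3-Tiny heads and bs=2, e.g.
#   pr_outs = [torch.rand(2, 507, 85), torch.rand(2, 2028, 85)]
#   pr_labels = postprocess_pr(pr_outs, conf_thres=0.25)
# each non-None entry of pr_labels is a [k, 7] tensor laid out as
# (x1, y1, x2, y2, conf, cls_prob, cls_id), sorted by descending conf.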
| [
[
[
57,
62
],
[
973,
978
],
[
1254,
1259
],
[
1490,
1495
],
[
2468,
2473
],
[
2729,
2734
]
],
[
[
90,
99
],
[
1456,
1465
]
],
[
[
101,
109
],
[
2071,
2079
]
],
[
[
116,
126
],
[
2502,
2512
]
],
[
[
261,
275
]
],
[
[
2633,
2647
]
]
] |
"""
This is an implementation of Function Secret Sharing
Useful papers are:
- Function Secret Sharing- Improvements and Extensions, Boyle 2017
Link: https://eprint.iacr.org/2018/707.pdf
- Secure Computation with Preprocessing via Function Secret Sharing, Boyle 2019
Link: https://eprint.iacr.org/2019/1095
Note that the protocols implemented here differ in several respects from those papers
"""
import hashlib
import torch as th
import syft as sy
λ = 110 # 6 # 110 or 63 # security parameter
n = 32 # 8 # 32 # bit precision
dtype = th.int32
no_wrap = {"no_wrap": True}
def initialize_crypto_plans(worker):
"""
This is called manually for the moment, to build the plan used to perform
Function Secret Sharing on a specific worker.
"""
eq_plan_1 = sy.Plan(
forward_func=lambda x, y: mask_builder(x, y, "eq"),
owner=worker,
tags=["#fss_eq_plan_1"],
is_built=True,
)
worker.register_obj(eq_plan_1)
eq_plan_2 = sy.Plan(
forward_func=eq_eval_plan, owner=worker, tags=["#fss_eq_plan_2"], is_built=True
)
worker.register_obj(eq_plan_2)
comp_plan_1 = sy.Plan(
forward_func=lambda x, y: mask_builder(x, y, "comp"),
owner=worker,
tags=["#fss_comp_plan_1"],
is_built=True,
)
worker.register_obj(comp_plan_1)
comp_plan_2 = sy.Plan(
forward_func=comp_eval_plan, owner=worker, tags=["#fss_comp_plan_2"], is_built=True
)
worker.register_obj(comp_plan_2)
xor_add_plan = sy.Plan(
forward_func=xor_add_convert_1, owner=worker, tags=["#xor_add_1"], is_built=True
)
worker.register_obj(xor_add_plan)
xor_add_plan = sy.Plan(
forward_func=xor_add_convert_2, owner=worker, tags=["#xor_add_2"], is_built=True
)
worker.register_obj(xor_add_plan)
def request_run_plan(worker, plan_tag, location, return_value, args=(), kwargs={}):
response_ids = (sy.ID_PROVIDER.pop(),)
args = (args, response_ids)
response = worker.send_command(
cmd_name="run",
target=plan_tag,
recipient=location,
return_ids=response_ids,
return_value=return_value,
kwargs_=kwargs,
args_=args,
)
return response
def fss_op(x1, x2, type_op="eq"):
"""
Define the workflow for a binary operation using Function Secret Sharing
    Currently supported operands are == and <=, respectively corresponding to
type_op = 'eq' and 'comp'
Args:
x1: first AST
x2: second AST
type_op: type of operation to perform, should be 'eq' or 'comp'
Returns:
shares of the comparison
"""
me = sy.local_worker
locations = x1.locations
shares = []
for location in locations:
args = (x1.child[location.id], x2.child[location.id])
share = request_run_plan(
me, f"#fss_{type_op}_plan_1", location, return_value=True, args=args
)
shares.append(share)
mask_value = sum(shares) % 2 ** n
shares = []
for i, location in enumerate(locations):
args = (th.IntTensor([i]), mask_value)
share = request_run_plan(
me, f"#fss_{type_op}_plan_2", location, return_value=False, args=args
)
shares.append(share)
if type_op == "comp":
prev_shares = shares
shares = []
for prev_share, location in zip(prev_shares, locations):
share = request_run_plan(
me, "#xor_add_1", location, return_value=True, args=(prev_share,)
)
shares.append(share)
masked_value = shares[0] ^ shares[1] # TODO case >2 workers ?
shares = {}
for i, prev_share, location in zip(range(len(locations)), prev_shares, locations):
share = request_run_plan(
me,
"#xor_add_2",
location,
return_value=False,
args=(th.IntTensor([i]), masked_value),
)
shares[location.id] = share
else:
shares = {loc.id: share for loc, share in zip(locations, shares)}
response = sy.AdditiveSharingTensor(shares, **x1.get_class_attributes())
return response
# share level
def mask_builder(x1, x2, type_op):
x = x1 - x2
# Keep the primitive in store as we use it after
alpha, s_0, *CW = x1.owner.crypto_store.get_keys(
f"fss_{type_op}", n_instances=x1.numel(), remove=False
)
return x + alpha.reshape(x.shape)
# share level
def eq_eval_plan(b, x_masked):
alpha, s_0, *CW = x_masked.owner.crypto_store.get_keys(
type_op="fss_eq", n_instances=x_masked.numel(), remove=True
)
result_share = DPF.eval(b, x_masked, s_0, *CW)
return result_share
# share level
def comp_eval_plan(b, x_masked):
alpha, s_0, *CW = x_masked.owner.crypto_store.get_keys(
type_op="fss_comp", n_instances=x_masked.numel(), remove=True
)
result_share = DIF.eval(b, x_masked, s_0, *CW)
return result_share
def xor_add_convert_1(x):
xor_share, add_share = x.owner.crypto_store.get_keys(
type_op="xor_add_couple", n_instances=x.numel(), remove=False
)
return x ^ xor_share.reshape(x.shape)
def xor_add_convert_2(b, x):
xor_share, add_share = x.owner.crypto_store.get_keys(
type_op="xor_add_couple", n_instances=x.numel(), remove=True
)
return add_share.reshape(x.shape) * (1 - 2 * x) + x * b
def eq(x1, x2):
return fss_op(x1, x2, "eq")
def le(x1, x2):
return fss_op(x1, x2, "comp")
class DPF:
"""Distributed Point Function - used for equality"""
def __init__(self):
pass
@staticmethod
def keygen(n_values=1):
beta = th.tensor([1], dtype=dtype)
alpha = th.randint(0, 2 ** n, (n_values,))
α = bit_decomposition(alpha)
s, t, CW = (
Array(n + 1, 2, λ, n_values),
Array(n + 1, 2, n_values),
Array(n, 2 * (λ + 1), n_values),
)
s[0] = randbit(size=(2, λ, n_values))
t[0] = th.tensor([[0, 1]] * n_values, dtype=th.uint8).t()
for i in range(0, n):
g0 = G(s[i, 0])
g1 = G(s[i, 1])
# Re-use useless randomness
sL_0, _, sR_0, _ = split(g0, [λ, 1, λ, 1])
sL_1, _, sR_1, _ = split(g1, [λ, 1, λ, 1])
s_rand = (sL_0 ^ sL_1) * α[i] + (sR_0 ^ sR_1) * (1 - α[i])
cw_i = TruthTableDPF(s_rand, α[i])
CW[i] = cw_i ^ g0 ^ g1
for b in (0, 1):
τ = [g0, g1][b] ^ (t[i, b] * CW[i])
τ = τ.reshape(2, λ + 1, n_values)
# filtered_τ = τ[𝛼[i]] OLD
α_i = α[i].unsqueeze(0).expand(λ + 1, n_values).unsqueeze(0).long()
filtered_τ = th.gather(τ, 0, α_i).squeeze(0)
s[i + 1, b], t[i + 1, b] = split(filtered_τ, [λ, 1])
CW_n = (-1) ** t[n, 1].to(dtype) * (beta - Convert(s[n, 0]) + Convert(s[n, 1]))
return (alpha,) + s[0].unbind() + (CW, CW_n)
@staticmethod
def eval(b, x, *k_b):
original_shape = x.shape
x = x.reshape(-1)
n_values = x.shape[0]
x = bit_decomposition(x)
s, t = Array(n + 1, λ, n_values), Array(n + 1, 1, n_values)
s[0] = k_b[0]
# here k[1:] is (CW, CW_n)
CW = k_b[1].unbind() + (k_b[2],)
t[0] = b
for i in range(0, n):
τ = G(s[i]) ^ (t[i] * CW[i])
τ = τ.reshape(2, λ + 1, n_values)
x_i = x[i].unsqueeze(0).expand(λ + 1, n_values).unsqueeze(0).long()
filtered_τ = th.gather(τ, 0, x_i).squeeze(0)
s[i + 1], t[i + 1] = split(filtered_τ, [λ, 1])
flat_result = (-1) ** b * (Convert(s[n]) + t[n].squeeze() * CW[n])
return flat_result.reshape(original_shape)
class DIF:
"""Distributed Interval Function - used for comparison <="""
def __init__(self):
pass
@staticmethod
def keygen(n_values=1):
alpha = th.randint(0, 2 ** n, (n_values,))
α = bit_decomposition(alpha)
s, t, CW = (
Array(n + 1, 2, λ, n_values),
Array(n + 1, 2, n_values),
Array(n, 2 + 2 * (λ + 1), n_values),
)
s[0] = randbit(size=(2, λ, n_values))
t[0] = th.tensor([[0, 1]] * n_values, dtype=th.uint8).t()
for i in range(0, n):
h0 = H(s[i, 0])
h1 = H(s[i, 1])
# Re-use useless randomness
_, _, sL_0, _, sR_0, _ = split(h0, [1, 1, λ, 1, λ, 1])
_, _, sL_1, _, sR_1, _ = split(h1, [1, 1, λ, 1, λ, 1])
s_rand = (sL_0 ^ sL_1) * α[i] + (sR_0 ^ sR_1) * (1 - α[i])
cw_i = TruthTableDIF(s_rand, α[i])
CW[i] = cw_i ^ h0 ^ h1
for b in (0, 1):
τ = [h0, h1][b] ^ (t[i, b] * CW[i])
τ = τ.reshape(2, λ + 2, n_values)
# filtered_τ = τ[𝛼[i]] OLD
α_i = α[i].unsqueeze(0).expand(λ + 2, n_values).unsqueeze(0).long()
filtered_τ = th.gather(τ, 0, α_i).squeeze(0)
σ_leaf, s[i + 1, b], t[i + 1, b] = split(filtered_τ, [1, λ, 1])
return (alpha,) + s[0].unbind() + (CW,)
@staticmethod
def eval(b, x, *k_b):
original_shape = x.shape
x = x.reshape(-1)
n_values = x.shape[0]
x = bit_decomposition(x)
FnOutput = Array(n + 1, n_values)
s, t = Array(n + 1, λ, n_values), Array(n + 1, 1, n_values)
s[0] = k_b[0]
CW = k_b[1].unbind()
t[0] = b
for i in range(0, n):
τ = H(s[i]) ^ (t[i] * CW[i])
τ = τ.reshape(2, λ + 2, n_values)
x_i = x[i].unsqueeze(0).expand(λ + 2, n_values).unsqueeze(0).long()
filtered_τ = th.gather(τ, 0, x_i).squeeze(0)
σ_leaf, s[i + 1], t[i + 1] = split(filtered_τ, [1, λ, 1])
FnOutput[i] = σ_leaf
        # Last round: the other σ is also a leaf:
FnOutput[n] = t[n]
flat_result = FnOutput.sum(axis=0) % 2
return flat_result.reshape(original_shape)
# PRG
def G(seed):
assert seed.shape[0] == λ
seed_t = seed.t().tolist()
gen_list = []
for seed_bit in seed_t:
enc_str = str(seed_bit).encode()
h = hashlib.sha3_256(enc_str)
r = h.digest()
binary_str = bin(int.from_bytes(r, byteorder="big"))[2 : 2 + (2 * (λ + 1))]
gen_list.append(list(map(int, binary_str)))
return th.tensor(gen_list, dtype=th.uint8).t()
def H(seed):
assert seed.shape[0] == λ
seed_t = seed.t().tolist()
gen_list = []
for seed_bit in seed_t:
enc_str = str(seed_bit).encode()
h = hashlib.sha3_256(enc_str)
r = h.digest()
binary_str = bin(int.from_bytes(r, byteorder="big"))[2 : 2 + 2 + (2 * (λ + 1))]
gen_list.append(list(map(int, binary_str)))
return th.tensor(gen_list, dtype=th.uint8).t()
def Convert(bits):
bit_pow_lambda = th.flip(2 ** th.arange(λ), (0,)).unsqueeze(-1).to(th.long)
return (bits.to(th.long) * bit_pow_lambda).sum(dim=0).to(dtype)
def Array(*shape):
return th.empty(shape, dtype=th.uint8)
bit_pow_n = th.flip(2 ** th.arange(n), (0,))
def bit_decomposition(x):
x = x.unsqueeze(-1)
z = bit_pow_n & x
z = z.t()
return (z > 0).to(th.uint8)
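# For example, assuming n = 32: bit_decomposition(th.tensor([5])) returns a
# (32, 1) uint8 tensor of MSB-first bits, whose last three rows are 1, 0, 1.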
def randbit(size):
return th.randint(2, size=size)
def concat(*args, **kwargs):
return th.cat(args, **kwargs)
def split(x, idx):
return th.split(x, idx)
def TruthTableDPF(s, α_i):
one = th.ones((1, s.shape[1])).to(th.uint8)
s_one = concat(s, one)
Table = th.zeros((2, λ + 1, len(α_i)), dtype=th.uint8)
for j, el in enumerate(α_i):
Table[el.item(), :, j] = s_one[:, j]
return Table.reshape(-1, Table.shape[2])
def TruthTableDIF(s, α_i):
leafTable = th.zeros((2, 1, len(α_i)), dtype=th.uint8)
# TODO optimize: just put alpha on first line
leaf_value = α_i
for j, el in enumerate(α_i):
leafTable[(1 - el).item(), 0, j] = leaf_value[j]
one = th.ones((1, s.shape[1])).to(th.uint8)
s_one = concat(s, one)
nextTable = th.zeros((2, λ + 1, len(α_i)), dtype=th.uint8)
for j, el in enumerate(α_i):
nextTable[el.item(), :, j] = s_one[:, j]
Table = concat(leafTable, nextTable, axis=1)
Table = Table.reshape(-1, Table.shape[2])
return Table
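# Hypothetical local self-test (not part of the original module): the two DPF
# evaluation shares should reconstruct beta = 1 at the secret point alpha and
# 0 elsewhere, modulo 2**n. keygen/eval run locally, without syft workers.
if __name__ == "__main__":
    alpha, k0, k1, CW, CW_n = DPF.keygen(n_values=1)
    y0 = DPF.eval(0, alpha, k0, CW, CW_n)
    y1 = DPF.eval(1, alpha, k1, CW, CW_n)
    print((int(y0) + int(y1)) % 2 ** n)  # expected: 1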
| [
[
[
395,
402
],
[
10227,
10234
],
[
10639,
10646
]
],
[
[
411,
422
],
[
534,
536
],
[
11127,
11129
],
[
11140,
11142
],
[
3069,
3071
],
[
3919,
3921
],
[
5686,
5688
],
[
5730,
5732
],
[
6021,
6023
],
[
6058,
6060
],
[
6751,
6753
],
[
7567,
7569
],
[
7962,
7964
],
[
8256,
8258
],
[
8293,
8295
],
[
9009,
9011
],
[
9737,
9739
],
[
10424,
10426
],
[
10450,
10452
],
[
10840,
10842
],
[
10866,
10868
],
[
10922,
10924
],
[
10935,
10937
],
[
10973,
10975
],
[
11001,
11003
],
[
11081,
11083
],
[
11103,
11105
],
[
11270,
11272
],
[
11312,
11314
],
[
11379,
11381
],
[
11434,
11436
],
[
11490,
11492
],
[
11518,
11520
],
[
11567,
11569
],
[
11606,
11608
],
[
11782,
11784
],
[
11816,
11818
],
[
11997,
11999
],
[
12025,
12027
],
[
12078,
12080
],
[
12117,
12119
]
],
[
[
430,
440
],
[
771,
773
],
[
975,
977
],
[
1132,
1134
],
[
1344,
1346
],
[
1508,
1510
],
[
1669,
1671
],
[
1917,
1919
],
[
2643,
2645
],
[
4107,
4109
]
],
[
[
443,
445
],
[
5852,
5854
],
[
5931,
5933
],
[
5992,
5994
],
[
6240,
6242
],
[
6247,
6249
],
[
6295,
6297
],
[
6302,
6304
],
[
6579,
6581
],
[
6686,
6688
],
[
6845,
6847
],
[
7189,
7191
],
[
7446,
7448
],
[
7504,
7506
],
[
7651,
7653
],
[
8083,
8085
],
[
8166,
8168
],
[
8227,
8229
],
[
8487,
8489
],
[
8494,
8496
],
[
8554,
8556
],
[
8561,
8563
],
[
8837,
8839
],
[
8944,
8946
],
[
9115,
9117
],
[
9406,
9408
],
[
9616,
9618
],
[
9674,
9676
],
[
9833,
9835
],
[
10095,
10097
],
[
10351,
10353
],
[
10507,
10509
],
[
10767,
10769
],
[
10945,
10947
],
[
11580,
11582
],
[
12091,
12093
]
],
[
[
491,
492
],
[
11150,
11151
],
[
2989,
2990
],
[
5749,
5750
],
[
5842,
5843
],
[
5884,
5885
],
[
5923,
5924
],
[
6098,
6099
],
[
6877,
6878
],
[
6913,
6914
],
[
6932,
6933
],
[
7182,
7183
],
[
7210,
7211
],
[
7370,
7371
],
[
7702,
7703
],
[
7710,
7711
],
[
7728,
7729
],
[
7981,
7982
],
[
8073,
8074
],
[
8115,
8116
],
[
8154,
8155
],
[
8333,
8334
],
[
9361,
9362
],
[
9399,
9400
],
[
9427,
9428
],
[
9540,
9541
],
[
9945,
9946
],
[
9938,
9939
]
],
[
[
526,
531
],
[
5707,
5712
],
[
6886,
6891
],
[
11042,
11047
]
],
[
[
544,
551
]
],
[
[
578,
601
]
],
[
[
1817,
1833
],
[
2814,
2830
],
[
3116,
3132
],
[
3416,
3432
],
[
3767,
3783
]
],
[
[
2230,
2236
],
[
5443,
5449
],
[
5493,
5499
]
],
[
[
4209,
4221
],
[
814,
826
],
[
1175,
1187
]
],
[
[
4490,
4502
],
[
1005,
1017
]
],
[
[
4746,
4760
],
[
1374,
1388
]
],
[
[
4992,
5009
],
[
1538,
1555
]
],
[
[
5196,
5213
],
[
1699,
1716
]
],
[
[
5420,
5422
]
],
[
[
5470,
5472
]
],
[
[
5524,
5527
],
[
4670,
4673
]
],
[
[
7791,
7794
],
[
4930,
4933
]
],
[
[
10058,
10059
],
[
6119,
6120
],
[
6147,
6148
],
[
7391,
7392
]
],
[
[
10470,
10471
],
[
8354,
8355
],
[
8382,
8383
],
[
9561,
9562
]
],
[
[
10886,
10893
],
[
6903,
6910
],
[
6922,
6929
],
[
7692,
7699
]
],
[
[
11055,
11060
],
[
5836,
5841
],
[
5878,
5883
],
[
5917,
5922
],
[
7176,
7181
],
[
7204,
7209
],
[
8067,
8072
],
[
8109,
8114
],
[
8148,
8153
],
[
9355,
9360
],
[
9393,
9398
],
[
9421,
9426
]
],
[
[
11115,
11124
],
[
11220,
11229
]
],
[
[
11166,
11183
],
[
5779,
5796
],
[
7140,
7157
],
[
8010,
8027
],
[
9315,
9332
]
],
[
[
11286,
11293
],
[
5975,
5982
],
[
8210,
8217
]
],
[
[
11343,
11349
],
[
11540,
11546
],
[
12047,
12053
],
[
12220,
12226
]
],
[
[
11408,
11413
],
[
6229,
6234
],
[
6284,
6289
],
[
6825,
6830
],
[
7631,
7636
],
[
8470,
8475
],
[
8537,
8542
],
[
9092,
9097
],
[
9810,
9815
]
],
[
[
11457,
11470
],
[
6399,
6412
]
],
[
[
11743,
11756
],
[
8657,
8670
]
]
] |
#!/usr/bin/env python3
"""
Test for local-subnet identifier
"""
import unittest
import netifaces
from base_test import PschedTestBase
from pscheduler.limitprocessor.identifier.localsubnet import *
DATA = {
}
class TestLimitprocessorIdentifierLocalSubnet(PschedTestBase):
"""
Test the Identifier
"""
def test_data_is_valid(self):
"""Limit Processor / Identifier Local Subnet / Data Validation"""
self.assertEqual(data_is_valid(DATA), (True, "OK"))
self.assertEqual(data_is_valid({ "abc": 123 }),
(False, 'Data is not an object or not empty.'))
def test_identifier(self):
"""Limit Processor / Identifier Local Subnet / Identifier"""
test_ifaces = {
"lo0": {
netifaces.AF_INET: [
{'addr': '127.0.0.1', 'netmask': '255.0.0.0', 'peer': '127.0.0.1'}
],
netifaces.AF_INET6: [
{'addr': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128', 'peer': '::1', 'flags': 0},
{'addr': 'fe80::1%lo0', 'netmask': 'ffff:ffff:ffff:ffff::/64', 'flags': 0}
]
}
}
ident = IdentifierLocalSubnet(DATA, test_ifaces=test_ifaces)
self.assertEqual(
ident.evaluate({ "requester": "127.0.0.5" }),
True)
self.assertEqual(
ident.evaluate({ "requester": "fe80::1" }),
True)
self.assertEqual(
ident.evaluate({ "requester": "192.0.2.9" }),
False)
self.assertEqual(
ident.evaluate({ "requester": "2001:db8::1" }),
False)
if __name__ == '__main__':
unittest.main()
| [
[
[
72,
80
],
[
1726,
1734
]
],
[
[
88,
97
],
[
782,
791
],
[
925,
934
]
],
[
[
121,
135
],
[
262,
276
]
],
[
[
198,
199
],
[
454,
467
],
[
514,
527
],
[
1224,
1245
]
],
[
[
202,
206
],
[
468,
472
],
[
1246,
1250
]
],
[
[
222,
261
]
]
] |
from PuzzleLib.Cuda.Kernels.RadixSort import backendTest
def unittest():
from PuzzleLib.Hip import Backend
backendTest(Backend)
if __name__ == "__main__":
unittest()
| [
[
[
45,
56
],
[
111,
122
]
],
[
[
63,
71
],
[
162,
170
]
]
] |
import os
from setuptools import setup
README = """
See the README on `GitHub
<https://github.com/uw-it-aca/app_name>`_.
"""
# The VERSION file is created by travis-ci, based on the tag name
version_path = "app_name/VERSION"
print(os.path.join(os.path.dirname(__file__), version_path))
VERSION = open(os.path.join(os.path.dirname(__file__), version_path)).read()
VERSION = VERSION.replace("\n", "")
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
url = "https://github.com/uw-it-aca/app_name"
setup(
name="app_name",
version=VERSION,
packages=["app_name"],
author="UW-IT AXDD",
author_email="aca-it@uw.edu",
include_package_data=True,
install_requires=[
'django~=3.2',
"django-webpack-loader",
],
license="Apache License, Version 2.0",
description="A tool for visually displaying UW course prerequisites",
long_description=README,
url=url,
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
],
)
| [
[
[
7,
9
],
[
233,
235
],
[
246,
248
],
[
303,
305
],
[
316,
318
],
[
443,
445
],
[
452,
454
],
[
469,
471
],
[
482,
484
],
[
509,
511
]
],
[
[
33,
38
],
[
569,
574
]
],
[
[
40,
46
],
[
959,
965
]
],
[
[
193,
205
],
[
273,
285
],
[
343,
355
]
],
[
[
288,
295
],
[
375,
382
]
],
[
[
365,
372
],
[
609,
616
]
],
[
[
523,
526
],
[
975,
978
]
]
] |
from abc import abstractmethod
from ml import LabelStudioMLBase
class LabelStudioMLBaseHelper(LabelStudioMLBase):
@abstractmethod
def prepare_tasks(self, tasks, workdir=None, **kwargs):
pass
@abstractmethod
def convert_predictions(self, predictions, **kwargs):
pass
@abstractmethod
def predict2(self, X, y=None, **kwargs):
pass
@abstractmethod
def fit2(self, X, y, **kwargs):
pass
def predict(self, tasks, **kwargs):
X, y = self.prepare_tasks(tasks, **kwargs)
predictions = self.predict2(X, y, **kwargs)
result = self.convert_predictions(predictions, **kwargs)
return result
def fit(self, completions, workdir=None, **kwargs):
X, y = self.prepare_tasks(completions, workdir=workdir, **kwargs)
return self.fit2(X, y, **kwargs)
def _has_annotation(self, task):
return 'completions' in task
class LabelStudioMLChoices(LabelStudioMLBaseHelper):
def __init__(self, **kwargs):
super(LabelStudioMLChoices, self).__init__(**kwargs)
assert len(self.parsed_label_config) == 1
self.from_name, self.info = list(self.parsed_label_config.items())[0]
assert self.info['type'] == 'Choices'
assert len(self.info['to_name']) == 1
assert len(self.info['inputs']) == 1
self.to_name = self.info['to_name'][0]
self.value = self.info['inputs'][0]['value']
def prepare_tasks(self, tasks, workdir=None, **kwargs):
X, y = [], []
for task in tasks:
X.append(task['data'][self.value])
if self._has_annotation(task):
choices = task['completions'][0]['result'][0]['value']['choices']
y.append(choices)
else:
y.append(None)
return X, y
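    # Illustrative task shape consumed above (field names follow the Label
    # Studio completions format; the concrete values are made up):
    #   {"data": {"text": "some input"},
    #    "completions": [{"result": [{"value": {"choices": ["Positive"]}}]}]}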
def convert_predictions(self, predictions, **kwargs):
list_choices, scores = predictions
results = []
for choices, score in zip(list_choices, scores):
result = [{
'from_name': self.from_name,
'to_name': self.to_name,
'type': 'choices',
'value': {'choices': choices}
}]
results.append({'result': result, 'score': score})
        return results
| [
[
[
16,
30
],
[
122,
136
],
[
216,
230
],
[
308,
322
],
[
387,
401
]
],
[
[
46,
63
],
[
96,
113
]
],
[
[
72,
95
],
[
958,
981
]
],
[
[
937,
957
],
[
1033,
1053
]
]
] |
import requests
import urllib
import time
import hashlib
import hmac
import itertools
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
from .api import Base
from .errors import ApiError, ArgumentError
def check_values(value, arg, arg_value):
if type(value) == type:
if type(arg_value) != value:
raise ArgumentError(u"Type of argument {} is invalid. It should be {}".format(arg, value))
elif arg_value not in value:
raise ArgumentError(u"Value of argument {} is invalid. It should be one of {}".format(arg, value))
def check_args(kwargs, required_parameters, optional_parameters={}):
args = kwargs.keys()
required_args = required_parameters.keys()
optional_args = optional_parameters.keys()
missing_args = list(set(required_args) - set(args))
if len(missing_args) > 0:
raise ArgumentError(u"Parameter {} is required".format(missing_args))
for arg_name, arg_value in kwargs.items():
if arg_name in optional_args:
optional_value = optional_parameters[arg_name]
check_values(optional_value, arg_name, arg_value)
elif arg_name in required_args:
required_value = required_parameters[arg_name]
check_values(required_value, arg_name, arg_value)
class TradeApi(Base):
def __init__(self, identifier=None, secret=None):
self.id = identifier
self.secret = secret
self.path = "/tapi/v3/"
self.available_pairs = ["BRLBTC", "BRLLTC", "BRLBCH", "BRLXRP", "BRLETH", "BRLUSDC", "BRLMBPRK01", "BRLMBPRK02", "BRLMBPRK03", "BRLMBPRK04", "BRLMBCONS01"]
Base.__init__(self)
def list_system_messages(self, level="INFO"):
"""https://www.mercadobitcoin.com.br/trade-api/#list_system_messages"""
payload = { "level": level }
check_args(payload, { "level": ["INFO", "WARNING", "ERROR"] })
return self.__check_response(self.__post_tapi("list_system_messages", payload))
def get_account_info(self):
"""https://www.mercadobitcoin.com.br/trade-api/#get_account_info"""
return self.__check_response(self.__post_tapi("get_account_info"))
def get_order(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#get_order"""
check_args(kwargs, { "coin_pair": self.available_pairs, "order_id": int })
return self.__check_response(self.__post_tapi("get_order", kwargs))
def list_orders(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#list_orders"""
check_args(kwargs, { "coin_pair": self.available_pairs }, { "order_type": [1, 2], "status_list": str, "has_fills": [True, False], "from_id": int, "to_id": int, "from_timestamp": str, "to_timestamp": str })
return self.__check_response(self.__post_tapi("list_orders", kwargs ))
def list_orderbook(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#list_orderbook"""
check_args(kwargs, { "coin_pair": self.available_pairs }, { "full": [True, False] })
return self.__check_response(self.__post_tapi("list_orderbook", kwargs ))
def place_buy_order(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#place_buy_order"""
check_args(kwargs, { "coin_pair": self.available_pairs, "quantity": str, "limit_price": str })
return self.__check_response(self.__post_tapi("place_buy_order", kwargs ))
def place_sell_order(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#place_sell_order"""
check_args(kwargs, { "coin_pair": self.available_pairs, "quantity": str, "limit_price": str })
return self.__check_response(self.__post_tapi("place_sell_order", kwargs ))
def cancel_order(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#cancel_order"""
check_args(kwargs, { "coin_pair": self.available_pairs, "order_id": int })
return self.__check_response(self.__post_tapi("cancel_order", kwargs ))
def get_withdrawal(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#get_withdrawal"""
check_args(kwargs, { "coin": self.available_pairs, "withdrawal_id": int })
return self.__check_response(self.__post_tapi("get_withdrawal", kwargs ))
def withdraw_coin_brl(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#withdraw_coin"""
check_args(kwargs, { "coin": ["BRL"], "quantity": str, "account_ref": str }, { "description": str })
return self.__check_response(self.__post_tapi("withdraw_coin", kwargs ))
def withdraw_coin(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#withdraw_coin"""
check_args(kwargs, { "coin": ["BTC", "LTC", "BCH", "ETH"], "quantity": str, "address": str, "tx_fee": str }, { "description": str })
return self.__check_response(self.__post_tapi("withdraw_coin", kwargs ))
def withdraw_coin_xrp(self, **kwargs):
"""https://www.mercadobitcoin.com.br/trade-api/#withdraw_coin"""
check_args(kwargs, { "coin": ["XRP"], "quantity": str, "address": str, "tx_fee": str, "destination_tag": int }, { "description": str })
return self.__check_response(self.__post_tapi("withdraw_coin", kwargs ))
def __check_response(self, response):
if response["status_code"] == 100:
return response["response_data"]
else:
raise ApiError(response["error_message"], response["status_code"])
def __post_tapi(self, method, params={}):
payload = { "tapi_method": method, "tapi_nonce": str(int(time.time()*1000000))}
payload.update(params)
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"TAPI-ID": self.id,
"TAPI-MAC": self.__signature(payload)
}
response = requests.post("https://{}{}".format(self.host, self.path), headers=headers, data=payload)
return response.json()
def __signature(self, payload):
signature = hmac.new(self.secret, digestmod=hashlib.sha512)
params = self.path + '?' + urlencode(payload)
signature.update(params.encode('utf-8'))
return signature.hexdigest()
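# Hypothetical usage (placeholder credentials; note the secret must be bytes,
# since it is passed to hmac.new in __signature above):
#   api = TradeApi(identifier="your_tapi_id", secret=b"your_tapi_secret")
#   print(api.list_system_messages())
#   print(api.place_buy_order(coin_pair="BRLBTC", quantity="0.5", limit_price="10000.00"))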
| [
[
[
7,
15
],
[
5928,
5936
]
],
[
[
23,
29
]
],
[
[
37,
41
],
[
5676,
5680
]
],
[
[
49,
56
],
[
6139,
6146
]
],
[
[
64,
68
],
[
6107,
6111
]
],
[
[
76,
85
]
],
[
[
121,
130
],
[
6190,
6199
]
],
[
[
174,
183
],
[
6190,
6199
]
],
[
[
202,
206
],
[
1342,
1346
],
[
1665,
1669
]
],
[
[
227,
235
],
[
5502,
5510
]
],
[
[
237,
250
],
[
377,
390
],
[
509,
522
],
[
893,
906
]
],
[
[
257,
269
],
[
1114,
1126
],
[
1275,
1287
]
],
[
[
608,
618
],
[
1863,
1873
],
[
2314,
2324
],
[
2584,
2594
],
[
3002,
3012
],
[
3296,
3306
],
[
3603,
3613
],
[
3903,
3913
],
[
4183,
4193
],
[
4467,
4477
],
[
4772,
4782
],
[
5113,
5123
]
],
[
[
1333,
1341
]
]
] |
from umonitor import __version__
def test_version():
assert __version__ == '0.1.5'
| [
[
[
21,
32
],
[
66,
77
]
],
[
[
39,
51
]
]
] |
#!/usr/bin/env python3
import sys, utils, random # import the modules we will need
utils.check_version((3,7)) # make sure we are running at least Python 3.7
utils.clear() # clear the screen
print('Greetings!') # prints out "Greetings!" in the terminal.
colors = ['red','orange','yellow','green','blue','violet','purple'] # creates a list of colors which will be saved for future use.
play_again = '' # creates a variable called "play_again" that is just an empty string at the moment
best_count = sys.maxsize # the largest available integer, so that the first game's guess count will always become the best count so far.
while (play_again != 'n' and play_again != 'no'): # will repeat the game, as long as the player has not responded negatively to playing again.
match_color = random.choice(colors) # the program picks a random color from the list we created earlier so the game is different every time.
count = 0 # starts a counter at 0 that will be used to check how many attempts the user had to go through in order to guess the correct color
color = '' # creates the variable color, which will soon be replaced by the user's input.
while (color != match_color): # will run this loop while the color does not match the randomly selected color
        color = input("\nWhat is my favorite color? ") #\n is a special code that adds a new line; this line also takes input from the user after printing "What is my favorite color?" in the window.
color = color.lower().strip() # this line takes the user's guessed color and strips it of spaces as well as downcasing all letters
count += 1 # this adds one to the count variable, tracking that the user just made a guess.
if (color == match_color): # checks if the guessed color matches the randomly selected color.
print('Correct!') # if so the program will print "Correct!"
else: # if the above check does not return true, the program will run what falls under this line.
print('Sorry, try again. You have guessed {guesses} times.'.format(guesses=count)) # the program prints the text within the quotes while replacing {guesses} with the variable saved in count
    print('\nYou guessed it in {0} tries!'.format(count)) #the program prints the text within the quotes and replaces {0} with the variable stored in count
if (count < best_count): # checks if the player had to use less guesses then their best run of this game so far.
print('This was your best guess so far!') # if the above check returns true, then the program prints the text within the quotes.
best_count = count # if the above check returns true, the current count for this game replaces best_count as the new record.
    play_again = input("\nWould you like to play again? ").lower().strip() #checks if the player would like to play again, stripping and downcasing the input before saving it in the play_again variable
print('Thanks for playing!') #once the player has ended the game by responding with "n" or "no" the program prints the text within quotes on this line.
| [
[
[
31,
34
],
[
534,
537
]
],
[
[
36,
41
],
[
95,
100
],
[
178,
183
]
],
[
[
43,
49
],
[
837,
843
]
],
[
[
298,
304
],
[
851,
857
]
],
[
[
429,
439
],
[
683,
693
],
[
705,
715
]
],
[
[
521,
531
],
[
2417,
2427
]
],
[
[
823,
834
],
[
1224,
1235
],
[
1784,
1795
]
],
[
[
968,
973
],
[
1671,
1676
],
[
2295,
2300
],
[
2409,
2414
],
[
2676,
2681
]
],
[
[
1114,
1119
],
[
1215,
1220
]
],
[
[
1326,
1331
],
[
1540,
1545
]
],
[
[
1532,
1537
],
[
1775,
1780
],
[
1215,
1220
]
],
[
[
2663,
2673
],
[
2417,
2427
]
],
[
[
2792,
2802
],
[
683,
693
],
[
705,
715
]
]
] |
#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Class for bitcoinexodusd node under test"""
import contextlib
import decimal
import errno
from enum import Enum
import http.client
import json
import logging
import os
import re
import subprocess
import tempfile
import time
import urllib.parse
from .authproxy import JSONRPCException
from .util import (
append_config,
delete_cookie_file,
get_rpc_proxy,
rpc_url,
wait_until,
p2p_port,
)
# For Python 3.4 compatibility
JSONDecodeError = getattr(json, "JSONDecodeError", ValueError)
BITCOINEXODUSD_PROC_WAIT_TIMEOUT = 60
class FailedToStartError(Exception):
"""Raised when a node fails to start correctly."""
class ErrorMatch(Enum):
FULL_TEXT = 1
FULL_REGEX = 2
PARTIAL_REGEX = 3
class TestNode():
"""A class for representing a bitcoinexodusd node under test.
This class contains:
- state about the node (whether it's running, etc)
- a Python subprocess.Popen object representing the running process
- an RPC connection to the node
- one or more P2P connections to the node
To make things easier for the test writer, any unrecognised messages will
be dispatched to the RPC connection."""
def __init__(self, i, datadir, *, rpchost, timewait, bitcoinexodusd, bitcoinexodus_cli, mocktime, coverage_dir, extra_conf=None, extra_args=None, use_cli=False):
self.index = i
self.datadir = datadir
self.stdout_dir = os.path.join(self.datadir, "stdout")
self.stderr_dir = os.path.join(self.datadir, "stderr")
self.rpchost = rpchost
self.rpc_timeout = timewait
self.binary = bitcoinexodusd
self.coverage_dir = coverage_dir
if extra_conf != None:
append_config(datadir, extra_conf)
# Most callers will just need to add extra args to the standard list below.
# For those callers that need more flexibility, they can just set the args property directly.
# Note that common args are set in the config file (see initialize_datadir)
self.extra_args = extra_args
self.args = [
self.binary,
"-datadir=" + self.datadir,
"-logtimemicros",
"-debug",
"-debugexclude=libevent",
"-debugexclude=leveldb",
"-mocktime=" + str(mocktime),
"-uacomment=testnode%d" % i
]
self.cli = TestNodeCLI(bitcoinexodus_cli, self.datadir)
self.use_cli = use_cli
self.running = False
self.process = None
self.rpc_connected = False
self.rpc = None
self.url = None
self.log = logging.getLogger('TestFramework.node%d' % i)
self.cleanup_on_exit = True # Whether to kill the node when this object goes away
self.p2ps = []
def get_deterministic_priv_key(self):
"""Return a deterministic priv key in base58, that only depends on the node's index"""
PRIV_KEYS = [
            # address , privkey
('mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z', 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'),
('msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg', 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'),
('mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP', 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'),
('mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR', 'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'),
('msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws', 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'),
('n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi', 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'),
('myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6', 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'),
('mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8', 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'),
('mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg', 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'),
]
return PRIV_KEYS[self.index]
def _node_msg(self, msg: str) -> str:
"""Return a modified msg that identifies this node by its index as a debugging aid."""
return "[node %d] %s" % (self.index, msg)
def _raise_assertion_error(self, msg: str):
"""Raise an AssertionError with msg modified to identify this node."""
raise AssertionError(self._node_msg(msg))
def __del__(self):
# Ensure that we don't leave any bitcoinexodusd processes lying around after
# the test ends
if self.process and self.cleanup_on_exit:
# Should only happen on test failure
# Avoid using logger, as that may have already been shutdown when
# this destructor is called.
print(self._node_msg("Cleaning up leftover process"))
self.process.kill()
def __getattr__(self, name):
"""Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
if self.use_cli:
return getattr(self.cli, name)
else:
assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection")
return getattr(self.rpc, name)
def start(self, extra_args=None, *, stdout=None, stderr=None, **kwargs):
"""Start the node."""
if extra_args is None:
extra_args = self.extra_args
# Add a new stdout and stderr file each time bitcoinexodusd is started
if stderr is None:
stderr = tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False)
if stdout is None:
stdout = tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False)
self.stderr = stderr
self.stdout = stdout
# Delete any existing cookie file -- if such a file exists (eg due to
# unclean shutdown), it will get overwritten anyway by bitcoinexodusd, and
# potentially interfere with our attempt to authenticate
delete_cookie_file(self.datadir)
# add environment variable LIBC_FATAL_STDERR_=1 so that libc errors are written to stderr and not the terminal
subp_env = dict(os.environ, LIBC_FATAL_STDERR_="1")
self.process = subprocess.Popen(self.args + extra_args, env=subp_env, stdout=stdout, stderr=stderr, **kwargs)
self.running = True
self.log.debug("bitcoinexodusd started, waiting for RPC to come up")
def wait_for_rpc_connection(self):
"""Sets up an RPC connection to the bitcoinexodusd process. Returns False if unable to connect."""
# Poll at a rate of four times per second
poll_per_s = 4
for _ in range(poll_per_s * self.rpc_timeout):
if self.process.poll() is not None:
raise FailedToStartError(self._node_msg(
'bitcoinexodusd exited with status {} during initialization'.format(self.process.returncode)))
try:
self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
self.rpc.getblockcount()
# If the call to getblockcount() succeeds then the RPC connection is up
self.rpc_connected = True
self.url = self.rpc.url
self.log.debug("RPC successfully started")
return
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
except JSONRPCException as e: # Initialization phase
if e.error['code'] != -28: # RPC in warmup?
raise # unknown JSON RPC exception
            except ValueError as e: # cookie file not found and no rpcuser or rpcpassword. bitcoinexodusd still starting
if "No RPC credentials" not in str(e):
raise
time.sleep(1.0 / poll_per_s)
self._raise_assertion_error("Unable to connect to bitcoinexodusd")
def get_wallet_rpc(self, wallet_name):
if self.use_cli:
return self.cli("-rpcwallet={}".format(wallet_name))
else:
assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected")
wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name))
return self.rpc / wallet_path
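    # Usage sketch (the wallet name "w1" is hypothetical): get_wallet_rpc
    # returns a proxy whose calls hit the per-wallet RPC endpoint, e.g.
    #   w1 = node.get_wallet_rpc("w1")
    #   w1.getbalance()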
def stop_node(self, expected_stderr=''):
"""Stop the node."""
if not self.running:
return
self.log.debug("Stopping node")
try:
self.stop()
except http.client.CannotSendRequest:
self.log.exception("Unable to stop node.")
# Check that stderr is as expected
self.stderr.seek(0)
stderr = self.stderr.read().decode('utf-8').strip()
if stderr != expected_stderr:
raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr))
self.stdout.close()
self.stderr.close()
del self.p2ps[:]
def is_node_stopped(self):
"""Checks whether the node has stopped.
Returns True if the node has stopped. False otherwise.
This method is responsible for freeing resources (self.process)."""
if not self.running:
return True
return_code = self.process.poll()
if return_code is None:
return False
# process has stopped. Assert that it didn't return an error code.
assert return_code == 0, self._node_msg(
"Node returned non-zero exit code (%d) when stopping" % return_code)
self.running = False
self.process = None
self.rpc_connected = False
self.rpc = None
self.log.debug("Node stopped")
return True
def wait_until_stopped(self, timeout=BITCOINEXODUSD_PROC_WAIT_TIMEOUT):
wait_until(self.is_node_stopped, timeout=timeout)
@contextlib.contextmanager
def assert_debug_log(self, expected_msgs):
debug_log = os.path.join(self.datadir, 'regtest', 'debug.log')
with open(debug_log, encoding='utf-8') as dl:
dl.seek(0, 2)
prev_size = dl.tell()
try:
yield
finally:
with open(debug_log, encoding='utf-8') as dl:
dl.seek(prev_size)
log = dl.read()
print_log = " - " + "\n - ".join(log.splitlines())
for expected_msg in expected_msgs:
if re.search(re.escape(expected_msg), log, flags=re.MULTILINE) is None:
self._raise_assertion_error('Expected message "{}" does not partially match log:\n\n{}\n\n'.format(expected_msg, print_log))
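    # Usage sketch (the expected message is illustrative, not a real log line):
    #   with node.assert_debug_log(expected_msgs=["block connected"]):
    #       do_something_that_logs()
    # The context manager records the debug.log offset on entry and, on exit,
    # asserts that each expected message appears in the newly written portion.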
def assert_start_raises_init_error(self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs):
"""Attempt to start the node and expect it to raise an error.
extra_args: extra arguments to pass through to bitcoinexodusd
expected_msg: regex that stderr should match when bitcoinexodusd fails
Will throw if bitcoinexodusd starts without an error.
        Will throw if an expected_msg is provided and it does not match bitcoinexodusd's stderr."""
with tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) as log_stderr, \
tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) as log_stdout:
try:
self.start(extra_args, stdout=log_stdout, stderr=log_stderr, *args, **kwargs)
self.wait_for_rpc_connection()
self.stop_node()
self.wait_until_stopped()
except FailedToStartError as e:
self.log.debug('bitcoinexodusd failed to start: %s', e)
self.running = False
self.process = None
# Check stderr for expected message
if expected_msg is not None:
log_stderr.seek(0)
stderr = log_stderr.read().decode('utf-8').strip()
if match == ErrorMatch.PARTIAL_REGEX:
if re.search(expected_msg, stderr, flags=re.MULTILINE) is None:
self._raise_assertion_error(
'Expected message "{}" does not partially match stderr:\n"{}"'.format(expected_msg, stderr))
elif match == ErrorMatch.FULL_REGEX:
if re.fullmatch(expected_msg, stderr) is None:
self._raise_assertion_error(
'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr))
elif match == ErrorMatch.FULL_TEXT:
if expected_msg != stderr:
self._raise_assertion_error(
'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr))
else:
if expected_msg is None:
assert_msg = "bitcoinexodusd should have exited with an error"
else:
assert_msg = "bitcoinexodusd should have exited with expected error " + expected_msg
self._raise_assertion_error(assert_msg)
def node_encrypt_wallet(self, passphrase):
""""Encrypts the wallet.
This causes bitcoinexodusd to shutdown, so this method takes
care of cleaning up resources."""
self.encryptwallet(passphrase)
self.wait_until_stopped()
def add_p2p_connection(self, p2p_conn, *, wait_for_verack=True, **kwargs):
"""Add a p2p connection to the node.
This method adds the p2p connection to the self.p2ps list and also
returns the connection to the caller."""
if 'dstport' not in kwargs:
kwargs['dstport'] = p2p_port(self.index)
if 'dstaddr' not in kwargs:
kwargs['dstaddr'] = '127.0.0.1'
p2p_conn.peer_connect(**kwargs)()
self.p2ps.append(p2p_conn)
if wait_for_verack:
p2p_conn.wait_for_verack()
return p2p_conn
@property
def p2p(self):
"""Return the first p2p connection
Convenience property - most tests only use a single p2p connection to each
node, so this saves having to write node.p2ps[0] many times."""
assert self.p2ps, self._node_msg("No p2p connection")
return self.p2ps[0]
def disconnect_p2ps(self):
"""Close all p2p connections to the node."""
for p in self.p2ps:
p.peer_disconnect()
del self.p2ps[:]
class TestNodeCLIAttr:
def __init__(self, cli, command):
self.cli = cli
self.command = command
def __call__(self, *args, **kwargs):
return self.cli.send_cli(self.command, *args, **kwargs)
def get_request(self, *args, **kwargs):
return lambda: self(*args, **kwargs)
class TestNodeCLI():
"""Interface to bitcoinexodus-cli for an individual node"""
def __init__(self, binary, datadir):
self.options = []
self.binary = binary
self.datadir = datadir
self.input = None
self.log = logging.getLogger('TestFramework.bitcoinexoduscli')
def __call__(self, *options, input=None):
# TestNodeCLI is callable with bitcoinexodus-cli command-line options
cli = TestNodeCLI(self.binary, self.datadir)
cli.options = [str(o) for o in options]
cli.input = input
return cli
def __getattr__(self, command):
return TestNodeCLIAttr(self, command)
def batch(self, requests):
results = []
for request in requests:
try:
results.append(dict(result=request()))
except JSONRPCException as e:
results.append(dict(error=e))
return results
def send_cli(self, command=None, *args, **kwargs):
"""Run bitcoinexodus-cli command. Deserializes returned string as python object."""
pos_args = [str(arg).lower() if type(arg) is bool else str(arg) for arg in args]
named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()]
assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same bitcoinexodus-cli call"
p_args = [self.binary, "-datadir=" + self.datadir] + self.options
if named_args:
p_args += ["-named"]
if command is not None:
p_args += [command]
p_args += pos_args + named_args
self.log.debug("Running bitcoinexodus-cli command: %s" % command)
process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
cli_stdout, cli_stderr = process.communicate(input=self.input)
returncode = process.poll()
if returncode:
match = re.match(r'error code: ([-0-9]+)\nerror message:\n(.*)', cli_stderr)
if match:
code, message = match.groups()
raise JSONRPCException(dict(code=int(code), message=message))
# Ignore cli_stdout, raise with cli_stderr
raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr)
try:
return json.loads(cli_stdout, parse_float=decimal.Decimal)
except JSONDecodeError:
return cli_stdout.rstrip("\n")
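# Usage sketch for TestNodeCLI (paths are hypothetical): attribute access is
# turned into a bitcoinexodus-cli invocation via __getattr__ and send_cli.
#   cli = TestNodeCLI("/path/to/bitcoinexodus-cli", "/tmp/node0")
#   cli.getblockcount()               # runs: bitcoinexodus-cli -datadir=/tmp/node0 getblockcount
#   cli("-rpcwallet=w1").getbalance() # per-call options supplied via __call__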
| [
[
[
269,
279
],
[
10099,
10109
]
],
[
[
287,
294
],
[
17520,
17527
]
],
[
[
302,
307
],
[
7611,
7616
]
],
[
[
325,
329
],
[
877,
881
]
],
[
[
337,
348
],
[
8771,
8775
]
],
[
[
356,
360
],
[
688,
692
],
[
17485,
17489
]
],
[
[
368,
375
],
[
2828,
2835
],
[
15360,
15367
]
],
[
[
383,
385
],
[
1636,
1638
],
[
1699,
1701
],
[
6323,
6325
],
[
10192,
10194
]
],
[
[
393,
395
],
[
10659,
10661
],
[
10669,
10671
],
[
10705,
10707
],
[
12282,
12284
],
[
12320,
12322
],
[
12609,
12611
],
[
17090,
17092
]
],
[
[
403,
413
],
[
6383,
6393
],
[
16819,
16829
],
[
16850,
16860
],
[
16874,
16884
],
[
16898,
16908
],
[
17379,
17389
]
],
[
[
421,
429
],
[
5679,
5687
],
[
5790,
5798
],
[
11397,
11405
],
[
11490,
11498
]
],
[
[
437,
441
],
[
8096,
8100
]
],
[
[
449,
461
],
[
8481,
8487
]
],
[
[
486,
502
],
[
7718,
7734
],
[
15943,
15959
],
[
17250,
17266
]
],
[
[
527,
540
],
[
1924,
1937
]
],
[
[
546,
564
],
[
6146,
6164
]
],
[
[
570,
583
],
[
7123,
7136
]
],
[
[
589,
596
],
[
7137,
7144
]
],
[
[
602,
612
],
[
10043,
10053
]
],
[
[
618,
626
],
[
14025,
14033
]
],
[
[
662,
677
],
[
17552,
17567
]
],
[
[
726,
758
],
[
10000,
10032
]
],
[
[
772,
790
],
[
6929,
6947
],
[
11820,
11838
]
],
[
[
866,
876
],
[
10961,
10971
],
[
12229,
12239
],
[
12559,
12569
],
[
12865,
12875
]
],
[
[
951,
959
]
],
[
[
14796,
14811
],
[
15735,
15750
]
],
[
[
15108,
15119
],
[
2592,
2603
],
[
15551,
15562
]
]
] |
import warnings
import numpy as np
from skimage import img_as_float
from skimage.util.dtype import dtype_range, dtype_limits
from skimage._shared.utils import deprecated
__all__ = ['histogram', 'cumulative_distribution', 'equalize',
'rescale_intensity', 'adjust_gamma',
'adjust_log', 'adjust_sigmoid']
def histogram(image, nbins=256):
"""Return histogram of image.
Unlike `numpy.histogram`, this function returns the centers of bins and
does not rebin integer arrays. For integer arrays, each integer value has
its own bin, which improves speed and intensity-resolution.
The histogram is computed on the flattened image: for color images, the
function should be used separately on each channel to obtain a histogram
for each color channel.
Parameters
----------
image : array
Input image.
nbins : int
Number of bins used to calculate histogram. This value is ignored for
integer arrays.
Returns
-------
hist : array
The values of the histogram.
bin_centers : array
The values at the center of the bins.
Examples
--------
>>> from skimage import data, exposure, util
>>> image = util.img_as_float(data.camera())
>>> np.histogram(image, bins=2)
(array([107432, 154712]), array([ 0. , 0.5, 1. ]))
>>> exposure.histogram(image, nbins=2)
(array([107432, 154712]), array([ 0.25, 0.75]))
"""
sh = image.shape
if len(sh) == 3 and sh[-1] < 4:
warnings.warn("This might be a color image. The histogram will be "
"computed on the flattened image. You can instead "
"apply this function to each color channel.")
# For integer types, histogramming with bincount is more efficient.
if np.issubdtype(image.dtype, np.integer):
offset = 0
if np.min(image) < 0:
offset = np.min(image)
hist = np.bincount(image.ravel() - offset)
bin_centers = np.arange(len(hist)) + offset
# clip histogram to start with a non-zero bin
idx = np.nonzero(hist)[0][0]
return hist[idx:], bin_centers[idx:]
else:
hist, bin_edges = np.histogram(image.flat, nbins)
bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2.
return hist, bin_centers
def cumulative_distribution(image, nbins=256):
"""Return cumulative distribution function (cdf) for the given image.
Parameters
----------
image : array
Image array.
nbins : int
Number of bins for image histogram.
Returns
-------
img_cdf : array
Values of cumulative distribution function.
bin_centers : array
Centers of bins.
References
----------
.. [1] http://en.wikipedia.org/wiki/Cumulative_distribution_function
"""
hist, bin_centers = histogram(image, nbins)
img_cdf = hist.cumsum()
img_cdf = img_cdf / float(img_cdf[-1])
return img_cdf, bin_centers
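# Example sketch: because the cumulative histogram is normalised by its final
# value, the returned CDF always ends at exactly 1.0.
#   cdf, bin_centers = cumulative_distribution(image)
#   assert cdf[-1] == 1.0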
@deprecated('equalize_hist')
def equalize(image, nbins=256):
return equalize_hist(image, nbins)
def equalize_hist(image, nbins=256):
"""Return image after histogram equalization.
Parameters
----------
image : array
Image array.
nbins : int
Number of bins for image histogram.
Returns
-------
out : float array
Image array after histogram equalization.
Notes
-----
This function is adapted from [1]_ with the author's permission.
References
----------
.. [1] http://www.janeriksolem.net/2009/06/histogram-equalization-with-python-and.html
.. [2] http://en.wikipedia.org/wiki/Histogram_equalization
"""
image = img_as_float(image)
cdf, bin_centers = cumulative_distribution(image, nbins)
out = np.interp(image.flat, bin_centers, cdf)
return out.reshape(image.shape)
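# Example sketch: after equalization the image's own CDF is approximately the
# identity line from 0 to 1, which is what makes the histogram "flat".
#   out = equalize_hist(image)
#   cdf, bin_centers = cumulative_distribution(out)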
def rescale_intensity(image, in_range=None, out_range=None):
"""Return image after stretching or shrinking its intensity levels.
The image intensities are uniformly rescaled such that the minimum and
maximum values given by `in_range` match those given by `out_range`.
Parameters
----------
image : array
Image array.
in_range : 2-tuple (float, float)
Min and max *allowed* intensity values of input image. If None, the
*allowed* min/max values are set to the *actual* min/max values in the
input image.
out_range : 2-tuple (float, float)
Min and max intensity values of output image. If None, use the min/max
intensities of the image data type. See `skimage.util.dtype` for
details.
Returns
-------
out : array
Image array after rescaling its intensity. This image is the same dtype
as the input image.
Examples
--------
By default, intensities are stretched to the limits allowed by the dtype:
>>> image = np.array([51, 102, 153], dtype=np.uint8)
>>> rescale_intensity(image)
array([ 0, 127, 255], dtype=uint8)
It's easy to accidentally convert an image dtype from uint8 to float:
>>> 1.0 * image
array([ 51., 102., 153.])
Use `rescale_intensity` to rescale to the proper range for float dtypes:
>>> image_float = 1.0 * image
>>> rescale_intensity(image_float)
array([ 0. , 0.5, 1. ])
To maintain the low contrast of the original, use the `in_range` parameter:
>>> rescale_intensity(image_float, in_range=(0, 255))
array([ 0.2, 0.4, 0.6])
If the min/max value of `in_range` is more/less than the min/max image
intensity, then the intensity levels are clipped:
>>> rescale_intensity(image_float, in_range=(0, 102))
array([ 0.5, 1. , 1. ])
If you have an image with signed integers but want to rescale the image to
just the positive range, use the `out_range` parameter:
>>> image = np.array([-10, 0, 10], dtype=np.int8)
>>> rescale_intensity(image, out_range=(0, 127))
array([ 0, 63, 127], dtype=int8)
"""
dtype = image.dtype.type
if in_range is None:
imin = np.min(image)
imax = np.max(image)
else:
imin, imax = in_range
if out_range is None:
omin, omax = dtype_range[dtype]
if imin >= 0:
omin = 0
else:
omin, omax = out_range
image = np.clip(image, imin, imax)
image = (image - imin) / float(imax - imin)
return dtype(image * (omax - omin) + omin)
def _assert_non_negative(image):
if np.any(image < 0):
raise ValueError('Image Correction methods work correctly only on '
'images with non-negative values. Use '
'skimage.exposure.rescale_intensity.')
def adjust_gamma(image, gamma=1, gain=1):
"""Performs Gamma Correction on the input image.
Also known as Power Law Transform.
This function transforms the input image pixelwise according to the
    equation ``O = gain * I**gamma`` after scaling each pixel to the range 0 to 1.
Parameters
----------
image : ndarray
Input image.
gamma : float
        Non-negative real number. Default value is 1.
gain : float
The constant multiplier. Default value is 1.
Returns
-------
out : ndarray
Gamma corrected output image.
Notes
-----
    For gamma greater than 1, the histogram shifts towards the left and the
    output image is darker than the input image.
    For gamma less than 1, the histogram shifts towards the right and the
    output image is brighter than the input image.
References
----------
.. [1] http://en.wikipedia.org/wiki/Gamma_correction
"""
_assert_non_negative(image)
dtype = image.dtype.type
    if gamma < 0:
        raise ValueError("Gamma should be a non-negative real number")
scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0])
out = ((image / scale) ** gamma) * scale * gain
return dtype(out)
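# Example sketch: gamma > 1 darkens mid-tones while leaving the endpoints of
# the dtype range fixed. For uint8 input [0, 127, 255] with gamma=2:
#   adjust_gamma(np.array([0, 127, 255], dtype=np.uint8), gamma=2)
#   # -> approximately [0, 63, 255]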
def adjust_log(image, gain=1, inv=False):
"""Performs Logarithmic correction on the input image.
This function transforms the input image pixelwise according to the
equation ``O = gain*log(1 + I)`` after scaling each pixel to the range 0 to 1.
For inverse logarithmic correction, the equation is ``O = gain*(2**I - 1)``.
Parameters
----------
image : ndarray
Input image.
gain : float
The constant multiplier. Default value is 1.
    inv : bool
If True, it performs inverse logarithmic correction,
else correction will be logarithmic. Defaults to False.
Returns
-------
out : ndarray
Logarithm corrected output image.
References
----------
.. [1] http://www.ece.ucsb.edu/Faculty/Manjunath/courses/ece178W03/EnhancePart1.pdf
"""
_assert_non_negative(image)
dtype = image.dtype.type
scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0])
if inv:
out = (2 ** (image / scale) - 1) * scale * gain
return dtype(out)
out = np.log2(1 + image / scale) * scale * gain
return dtype(out)
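# Example sketch: logarithmic correction brightens mid-tones. For uint8 input
# [0, 127, 255] with the default gain, 127 maps to roughly 148 while the
# endpoints stay fixed, since log2(1 + 0) == 0 and log2(1 + 1) == 1.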
def adjust_sigmoid(image, cutoff=0.5, gain=10, inv=False):
"""Performs Sigmoid Correction on the input image.
Also known as Contrast Adjustment.
This function transforms the input image pixelwise according to the
    equation ``O = 1/(1 + exp(gain*(cutoff - I)))`` after scaling each pixel
to the range 0 to 1.
Parameters
----------
image : ndarray
Input image.
cutoff : float
Cutoff of the sigmoid function that shifts the characteristic curve
in horizontal direction. Default value is 0.5.
gain : float
The constant multiplier in exponential's power of sigmoid function.
Default value is 10.
inv : bool
If True, returns the negative sigmoid correction. Defaults to False.
Returns
-------
out : ndarray
Sigmoid corrected output image.
References
----------
.. [1] Gustav J. Braun, "Image Lightness Rescaling Using Sigmoidal Contrast
Enhancement Functions",
http://www.cis.rit.edu/fairchild/PDFs/PAP07.pdf
"""
_assert_non_negative(image)
dtype = image.dtype.type
scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0])
if inv:
out = (1 - 1 / (1 + np.exp(gain * (cutoff - image/scale)))) * scale
return dtype(out)
out = (1 / (1 + np.exp(gain * (cutoff - image/scale)))) * scale
return dtype(out)
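# Example sketch: sigmoid correction stretches contrast around the cutoff and
# saturates the extremes. For uint8 input [0, 127, 255] with the defaults
# (cutoff=0.5, gain=10), the output is roughly [1, 126, 253].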
| [
[
[
7,
15
],
[
1521,
1529
]
],
[
[
23,
34
],
[
1811,
1813
],
[
1838,
1840
],
[
1881,
1883
],
[
1921,
1923
],
[
1950,
1952
],
[
2008,
2010
],
[
2107,
2109
],
[
2211,
2213
],
[
3804,
3806
],
[
6107,
6109
],
[
6136,
6138
],
[
6354,
6356
],
[
6520,
6522
],
[
9080,
9082
],
[
10389,
10391
],
[
10484,
10486
]
],
[
[
56,
68
],
[
3713,
3725
]
],
[
[
100,
111
],
[
6238,
6249
]
],
[
[
113,
125
],
[
7862,
7874
],
[
7893,
7905
],
[
8913,
8925
],
[
8944,
8956
],
[
10287,
10299
],
[
10318,
10330
]
],
[
[
160,
170
],
[
3002,
3012
]
],
[
[
173,
180
]
],
[
[
333,
342
],
[
2872,
2881
]
],
[
[
2342,
2365
],
[
3756,
3779
]
],
[
[
3034,
3042
]
],
[
[
3107,
3120
],
[
3073,
3086
]
],
[
[
3886,
3903
]
],
[
[
6483,
6503
],
[
7707,
7727
],
[
8838,
8858
],
[
10212,
10232
]
],
[
[
6750,
6762
]
],
[
[
8004,
8014
]
],
[
[
9150,
9164
]
]
] |
# Copyright 2020-2022 OpenDR European Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import shutil
import torch
from opendr.perception.object_tracking_2d import ObjectTracking2DDeepSortLearner
from opendr.perception.object_tracking_2d import (
Market1501Dataset,
Market1501DatasetIterator,
)
from opendr.perception.object_tracking_2d import (
MotDataset,
RawMotWithDetectionsDatasetIterator,
)
import os
DEVICE = os.getenv('TEST_DEVICE', 'cpu')
print("Using device:", DEVICE)
print("Using device:", DEVICE, file=sys.stderr)
def rmfile(path):
try:
os.remove(path)
except OSError as e:
print("Error: %s - %s." % (e.filename, e.strerror))
def rmdir(_dir):
try:
shutil.rmtree(_dir)
except OSError as e:
print("Error: %s - %s." % (e.filename, e.strerror))
class TestObjectTracking2DDeepSortLearner(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.temp_dir = os.path.join("tests", "sources", "tools",
"perception", "object_tracking_2d",
"deep_sort",
"deep_sort_temp")
cls.train_split_paths = {
"nano_mot20": os.path.join(
".", "src", "opendr", "perception", "object_tracking_2d",
"datasets", "splits", "nano_mot20.train"
)
}
cls.model_names = [
"deep_sort",
]
cls.mot_dataset_path = MotDataset.download_nano_mot20(
os.path.join(cls.temp_dir, "mot_dataset"), True
).path
cls.market1501_dataset_path = Market1501Dataset.download_nano_market1501(
os.path.join(cls.temp_dir, "market1501_dataset"), True
).path
print("Dataset downloaded", file=sys.stderr)
for model_name in cls.model_names:
ObjectTracking2DDeepSortLearner.download(
model_name, cls.temp_dir
)
print("Models downloaded", file=sys.stderr)
@classmethod
def tearDownClass(cls):
# Clean up downloaded files
rmdir(os.path.join(cls.temp_dir))
def test_fit(self):
def test_model(name):
dataset = Market1501Dataset(self.market1501_dataset_path)
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
starting_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
learner.fit(
dataset,
epochs=2,
val_epochs=2,
verbose=True,
)
new_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
self.assertFalse(torch.equal(starting_param, new_param))
print("Fit", name, "ok", file=sys.stderr)
for name in self.model_names:
test_model(name)
def test_fit_iterator(self):
def test_model(name):
dataset = Market1501DatasetIterator(
os.path.join(self.market1501_dataset_path, "bounding_box_train"),
)
eval_dataset = Market1501DatasetIterator(
os.path.join(self.market1501_dataset_path, "bounding_box_test"),
)
learner = ObjectTracking2DDeepSortLearner(
checkpoint_after_iter=3,
temp_path=self.temp_dir,
device=DEVICE,
)
starting_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
learner.fit(
dataset,
epochs=2,
val_dataset=eval_dataset,
val_epochs=2,
verbose=True,
)
new_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
self.assertFalse(torch.equal(starting_param, new_param))
print("Fit iterator", name, "ok", file=sys.stderr)
for name in self.model_names:
test_model(name)
def test_eval(self):
def test_model(name):
model_path = os.path.join(self.temp_dir, name)
train_split_paths = {
"nano_mot20": os.path.join(
".", "src", "opendr", "perception", "object_tracking_2d",
"datasets", "splits", "nano_mot20.train"
)
}
dataset = RawMotWithDetectionsDatasetIterator(
self.mot_dataset_path,
train_split_paths
)
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner.load(model_path, verbose=True)
result = learner.eval(dataset)
self.assertGreater(len(result["mota"]), 0)
for name in self.model_names:
test_model(name)
def test_infer(self):
def test_model(name):
model_path = os.path.join(self.temp_dir, name)
train_split_paths = {
"nano_mot20": os.path.join(
".", "src", "opendr", "perception", "object_tracking_2d",
"datasets", "splits", "nano_mot20.train"
)
}
dataset = RawMotWithDetectionsDatasetIterator(
self.mot_dataset_path,
train_split_paths
)
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner.load(model_path, verbose=True)
result = learner.infer(dataset[0][0], 1)
self.assertTrue(len(result) > 0)
learner.reset()
result = learner.infer([
dataset[0][0],
dataset[1][0],
])
self.assertTrue(len(result) == 2)
self.assertTrue(len(result[0]) > 0)
for name in self.model_names:
test_model(name)
def test_save(self):
def test_model(name):
model_path = os.path.join(self.temp_dir, "test_save_" + name)
save_path = os.path.join(model_path, "save")
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner.save(save_path, True)
starting_param_1 = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
learner2 = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner2.load(save_path)
new_param = list(learner2.tracker.deepsort.extractor.net.parameters())[0].clone()
self.assertTrue(torch.equal(starting_param_1, new_param))
for name in self.model_names:
test_model(name)
def test_optimize(self):
def test_model(name):
model_path = os.path.join(self.temp_dir, name)
train_split_paths = {
"nano_mot20": os.path.join(
".", "src", "opendr", "perception", "object_tracking_2d",
"datasets", "splits", "nano_mot20.train"
)
}
dataset = RawMotWithDetectionsDatasetIterator(
self.mot_dataset_path,
train_split_paths
)
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner.load(model_path, verbose=True)
learner.optimize()
result = learner.eval(dataset)
self.assertGreater(len(result["mota"]), 0)
for name in self.model_names:
test_model(name)
if __name__ == "__main__":
unittest.main()
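# Usage sketch: run the suite directly, selecting the device through the
# TEST_DEVICE environment variable read at import time above, e.g.
#   TEST_DEVICE=cuda python -m unittest path.to.this_module
# (the module path is hypothetical; any runner that imports this file works)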
| [
[
[
600,
603
],
[
1097,
1100
],
[
2373,
2376
],
[
2579,
2582
],
[
3439,
3442
],
[
4566,
4569
]
],
[
[
611,
619
],
[
1432,
1440
],
[
8452,
8460
]
],
[
[
627,
633
],
[
1283,
1289
]
],
[
[
641,
646
],
[
3356,
3361
],
[
4474,
4479
],
[
7403,
7408
]
],
[
[
696,
727
],
[
2441,
2472
],
[
2865,
2896
],
[
3900,
3931
],
[
5180,
5211
],
[
6052,
6083
],
[
6838,
6869
],
[
7124,
7155
],
[
8051,
8082
]
],
[
[
783,
800
],
[
2205,
2222
],
[
2794,
2811
]
],
[
[
806,
831
],
[
3605,
3630
],
[
3755,
3780
]
],
[
[
890,
900
],
[
2060,
2070
]
],
[
[
906,
941
],
[
5033,
5068
],
[
5905,
5940
],
[
7904,
7939
]
],
[
[
952,
954
],
[
993,
995
],
[
965,
967
],
[
1146,
1148
],
[
1517,
1519
],
[
1795,
1797
],
[
2104,
2106
],
[
2261,
2263
],
[
2688,
2690
],
[
3648,
3650
],
[
3798,
3800
],
[
4727,
4729
],
[
4825,
4827
],
[
5599,
5601
],
[
5697,
5699
],
[
6709,
6711
],
[
6782,
6784
],
[
7598,
7600
],
[
7696,
7698
]
],
[
[
956,
962
],
[
1053,
1059
],
[
1084,
1090
],
[
2962,
2968
],
[
4038,
4044
],
[
5277,
5283
],
[
6149,
6155
],
[
6935,
6941
],
[
7221,
7227
],
[
8148,
8154
]
],
[
[
1115,
1121
]
],
[
[
1253,
1258
],
[
2682,
2687
]
],
[
[
1396,
1431
]
]
] |
# coding: utf-8
"""Python data types for IB Flex format XML data.
These class definitions are introspected by ibflex.parser to type-convert
IB data. They're dataclasses, made immutable by passing `frozen=True` to the
class decorator. Class attributes are annotated with PEP 484 type hints.
Except for the top-level XML elements, i.e. <FlexQueryResponse>,
<FlexStatements>, and <FlexStatement>, the Flex format cleanly differentiates
between data-bearing elements and container elements. Data elements hold
their values in XML element attributes; container elements are sequences
of child elements (usually data elements, but sometimes other containers).
XML element attributes are represented by class attributes hinted with the
Python type to which their values should be converted. Almost all are marked
`Optional`, since Flex report configuration allows any of them to be included
or omitted individually. Default value is `None` for a single value, or an
empty tuple for a sequence.
Specifically defined enums are an exception; the parser handles missing values
for them, so you shouldn't specify a default value. The enums therefore need
to come first in the class definition to avoid offending dataclass.
Some data elements have XML attributes whose values are sequences delimited by
commas or semicolons. These are represented by class attributes hinted as
a variable-length `Tuple` of their sequence item type (`str` or an Enum type).
XML container elements are represented as variable-length `Tuple` of contained
child type.
TODO - need types for:
FdicInsuredDepositsByBank
ComplexPositions
HKIPOSubscriptionActivity
PendingExcercises
FxTransactions
UnbookedTrades
RoutingCommissions
IBGNoteTransactions
Adjustments
SoftDollars
CFDCharges
SLBOpenContracts
HKIPOOpenSubscriptions
"""
# PEP 563 compliance
# https://www.python.org/dev/peps/pep-0563/#resolving-type-hints-at-runtime
from __future__ import annotations
__all__ = [
"FlexElement",
"FlexQueryResponse",
"FlexStatement",
"AccountInformation",
"ChangeInNAV",
"MTMPerformanceSummaryUnderlying",
"EquitySummaryByReportDateInBase",
"MTDYTDPerformanceSummaryUnderlying",
"CashReportCurrency",
"FIFOPerformanceSummaryUnderlying",
"NetStockPosition",
"UnsettledTransfer",
"UnbundledCommissionDetail",
"StatementOfFundsLine",
"ChangeInPositionValue",
"OpenPosition",
"FxLot",
"Trade",
"TradeConfirm",
"OptionEAE",
"TradeTransfer",
"TierInterestDetail",
"HardToBorrowDetail",
"InterestAccrualsCurrency",
"SLBActivity",
"Transfer",
"CorporateAction",
"CashTransaction",
"ChangeInDividendAccrual",
"OpenDividendAccrual",
"SecurityInfo",
"ConversionRate",
"PriorPeriodPosition",
"ClientFee",
"ClientFeesDetail",
"SalesTax",
"DebitCardActivity",
"SymbolSummary",
"Order"
]
import datetime
import decimal
from dataclasses import dataclass, astuple
from typing import Tuple, Optional
from ibflex import enums
@dataclass(frozen=True)
class FlexElement:
""" Base class for data element types """
def __iter__(self):
return iter(astuple(self))
def items(self):
for attr, val in self.__dict__.items():
yield attr, val
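# Usage sketch (a parsed `element` is assumed): FlexElement supports tuple-style
# iteration over its values and dict-style iteration over (attr, value) pairs.
#   values = tuple(element)
#   for attr, val in element.items():
#       print(attr, val)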
@dataclass(frozen=True)
class FlexQueryResponse(FlexElement):
""" Root element """
queryName: str
type: str
FlexStatements: Tuple["FlexStatement", ...]
def __repr__(self):
repr = (
f"{type(self).__name__}("
f"queryName={self.queryName!r}, "
f"type={self.type!r}, "
f"len(FlexStatements)={len(self.FlexStatements)}"
")"
)
return repr
@dataclass(frozen=True)
class FlexStatement(FlexElement):
""" Wrapped in <FlexStatements> """
accountId: str
fromDate: datetime.date
toDate: datetime.date
period: str
whenGenerated: datetime.datetime
AccountInformation: Optional["_AccountInformation"] = None
ChangeInNAV: Optional["_ChangeInNAV"] = None
CashReport: Tuple["CashReportCurrency", ...] = ()
MTDYTDPerformanceSummary: Tuple["MTDYTDPerformanceSummaryUnderlying", ...] = ()
MTMPerformanceSummaryInBase: Tuple["MTMPerformanceSummaryUnderlying", ...] = ()
EquitySummaryInBase: Tuple["EquitySummaryByReportDateInBase", ...] = ()
FIFOPerformanceSummaryInBase: Tuple["FIFOPerformanceSummaryUnderlying", ...] = ()
FdicInsuredDepositsByBank: Tuple = () # TODO
StmtFunds: Tuple["StatementOfFundsLine", ...] = ()
ChangeInPositionValues: Tuple["ChangeInPositionValue", ...] = ()
OpenPositions: Tuple["OpenPosition", ...] = ()
NetStockPositionSummary: Tuple["NetStockPosition", ...] = ()
ComplexPositions: Tuple = () # TODO
FxPositions: Tuple["FxLot", ...] = () # N.B. FXLot wrapped in FxLots
Trades: Tuple["Trade", ...] = ()
HKIPOSubscriptionActivity: Tuple = () # TODO
TradeConfirms: Tuple["TradeConfirm", ...] = ()
TransactionTaxes: Tuple = ()
OptionEAE: Tuple["_OptionEAE", ...] = ()
# Not a typo - they really spell it "Excercises"
PendingExcercises: Tuple = () # TODO
TradeTransfers: Tuple["TradeTransfer", ...] = ()
FxTransactions: Tuple = () # TODO
UnbookedTrades: Tuple = () # TODO
RoutingCommissions: Tuple = () # TODO
IBGNoteTransactions: Tuple = () # TODO
UnsettledTransfers: Tuple["UnsettledTransfer", ...] = ()
UnbundledCommissionDetails: Tuple["UnbundledCommissionDetail", ...] = ()
Adjustments: Tuple = () # TODO
PriorPeriodPositions: Tuple["PriorPeriodPosition", ...] = ()
CorporateActions: Tuple["CorporateAction", ...] = ()
ClientFees: Tuple["ClientFee", ...] = ()
ClientFeesDetail: Tuple["_ClientFeesDetail", ...] = ()
DebitCardActivities: Tuple["DebitCardActivity", ...] = ()
SoftDollars: Tuple = () # TODO
CashTransactions: Tuple["CashTransaction", ...] = ()
SalesTaxes: Tuple["SalesTax", ...] = ()
CFDCharges: Tuple = () # TODO
InterestAccruals: Tuple["InterestAccrualsCurrency", ...] = ()
TierInterestDetails: Tuple["TierInterestDetail", ...] = ()
HardToBorrowDetails: Tuple["HardToBorrowDetail", ...] = ()
HardToBorrowMarkupDetails: Tuple = ()
SLBOpenContracts: Tuple = () # TODO
SLBActivities: Tuple["SLBActivity", ...] = ()
SLBFees: Tuple["SLBFee", ...] = ()
Transfers: Tuple["Transfer", ...] = ()
ChangeInDividendAccruals: Tuple["_ChangeInDividendAccrual", ...] = ()
OpenDividendAccruals: Tuple["OpenDividendAccrual", ...] = ()
SecuritiesInfo: Tuple["SecurityInfo", ...] = ()
ConversionRates: Tuple["ConversionRate", ...] = ()
HKIPOOpenSubscriptions: Tuple = () # TODO
CommissionCredits: Tuple = () # TODO
StockGrantActivities: Tuple = () # TODO
def __repr__(self):
repr = (
f"{type(self).__name__}("
f"accountId={self.accountId!r}, "
f"fromDate={self.fromDate!r}, "
f"toDate={self.toDate!r}, "
f"period={self.period!r}, "
f"whenGenerated={self.whenGenerated!r}"
)
sequences = (
(k, getattr(self, k))
for k, v in self.__annotations__.items()
if hasattr(v, "__origin__") and v.__origin__ is tuple
)
nonempty_sequences = ", ".join(
f"len({name})={len(value)}" for (name, value) in sequences if value
)
        if nonempty_sequences:
            repr += ", " + nonempty_sequences
        repr += ")"
return repr
@dataclass(frozen=True)
class AccountInformation(FlexElement):
""" Child of <FlexStatement> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
name: Optional[str] = None
accountType: Optional[str] = None
customerType: Optional[str] = None
accountCapabilities: Tuple[str, ...] = ()
tradingPermissions: Tuple[str, ...] = ()
registeredRepName: Optional[str] = None
registeredRepPhone: Optional[str] = None
dateOpened: Optional[datetime.date] = None
dateFunded: Optional[datetime.date] = None
dateClosed: Optional[datetime.date] = None
street: Optional[str] = None
street2: Optional[str] = None
city: Optional[str] = None
state: Optional[str] = None
country: Optional[str] = None
postalCode: Optional[str] = None
streetResidentialAddress: Optional[str] = None
street2ResidentialAddress: Optional[str] = None
cityResidentialAddress: Optional[str] = None
stateResidentialAddress: Optional[str] = None
countryResidentialAddress: Optional[str] = None
postalCodeResidentialAddress: Optional[str] = None
masterName: Optional[str] = None
ibEntity: Optional[str] = None
primaryEmail: Optional[str] = None
accountRepName: Optional[str] = None
accountRepPhone: Optional[str] = None
# Type alias to work around https://github.com/python/mypy/issues/1775
_AccountInformation = AccountInformation
@dataclass(frozen=True)
class ChangeInNAV(FlexElement):
""" Child of <FlexStatement> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
fromDate: Optional[datetime.date] = None
toDate: Optional[datetime.date] = None
startingValue: Optional[decimal.Decimal] = None
mtm: Optional[decimal.Decimal] = None
realized: Optional[decimal.Decimal] = None
changeInUnrealized: Optional[decimal.Decimal] = None
costAdjustments: Optional[decimal.Decimal] = None
transferredPnlAdjustments: Optional[decimal.Decimal] = None
depositsWithdrawals: Optional[decimal.Decimal] = None
internalCashTransfers: Optional[decimal.Decimal] = None
assetTransfers: Optional[decimal.Decimal] = None
debitCardActivity: Optional[decimal.Decimal] = None
billPay: Optional[decimal.Decimal] = None
dividends: Optional[decimal.Decimal] = None
withholdingTax: Optional[decimal.Decimal] = None
withholding871m: Optional[decimal.Decimal] = None
withholdingTaxCollected: Optional[decimal.Decimal] = None
changeInDividendAccruals: Optional[decimal.Decimal] = None
interest: Optional[decimal.Decimal] = None
changeInInterestAccruals: Optional[decimal.Decimal] = None
advisorFees: Optional[decimal.Decimal] = None
brokerFees: Optional[decimal.Decimal] = None
changeInBrokerFeeAccruals: Optional[decimal.Decimal] = None
clientFees: Optional[decimal.Decimal] = None
otherFees: Optional[decimal.Decimal] = None
feesReceivables: Optional[decimal.Decimal] = None
commissions: Optional[decimal.Decimal] = None
commissionReceivables: Optional[decimal.Decimal] = None
forexCommissions: Optional[decimal.Decimal] = None
transactionTax: Optional[decimal.Decimal] = None
taxReceivables: Optional[decimal.Decimal] = None
salesTax: Optional[decimal.Decimal] = None
softDollars: Optional[decimal.Decimal] = None
netFxTrading: Optional[decimal.Decimal] = None
fxTranslation: Optional[decimal.Decimal] = None
linkingAdjustments: Optional[decimal.Decimal] = None
other: Optional[decimal.Decimal] = None
endingValue: Optional[decimal.Decimal] = None
twr: Optional[decimal.Decimal] = None
corporateActionProceeds: Optional[decimal.Decimal] = None
commissionCreditsRedemption: Optional[decimal.Decimal] = None
grantActivity: Optional[decimal.Decimal] = None
excessFundSweep: Optional[decimal.Decimal] = None
billableSalesTax: Optional[decimal.Decimal] = None
# Type alias to work around https://github.com/python/mypy/issues/1775
_ChangeInNAV = ChangeInNAV
@dataclass(frozen=True)
class MTMPerformanceSummaryUnderlying(FlexElement):
""" Wrapped in <MTMPerformanceSummaryInBase> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
prevCloseQuantity: Optional[decimal.Decimal] = None
prevClosePrice: Optional[decimal.Decimal] = None
closeQuantity: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
transactionMtm: Optional[decimal.Decimal] = None
priorOpenMtm: Optional[decimal.Decimal] = None
commissions: Optional[decimal.Decimal] = None
other: Optional[decimal.Decimal] = None
total: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
corpActionMtm: Optional[decimal.Decimal] = None
dividends: Optional[decimal.Decimal] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
otherWithAccruals: Optional[decimal.Decimal] = None
totalWithAccruals: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class EquitySummaryByReportDateInBase(FlexElement):
""" Wrapped in <EquitySummaryInBase> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
reportDate: Optional[datetime.date] = None
cash: Optional[decimal.Decimal] = None
cashLong: Optional[decimal.Decimal] = None
cashShort: Optional[decimal.Decimal] = None
slbCashCollateral: Optional[decimal.Decimal] = None
slbCashCollateralLong: Optional[decimal.Decimal] = None
slbCashCollateralShort: Optional[decimal.Decimal] = None
stock: Optional[decimal.Decimal] = None
stockLong: Optional[decimal.Decimal] = None
stockShort: Optional[decimal.Decimal] = None
slbDirectSecuritiesBorrowed: Optional[decimal.Decimal] = None
slbDirectSecuritiesBorrowedLong: Optional[decimal.Decimal] = None
slbDirectSecuritiesBorrowedShort: Optional[decimal.Decimal] = None
slbDirectSecuritiesLent: Optional[decimal.Decimal] = None
slbDirectSecuritiesLentLong: Optional[decimal.Decimal] = None
slbDirectSecuritiesLentShort: Optional[decimal.Decimal] = None
options: Optional[decimal.Decimal] = None
optionsLong: Optional[decimal.Decimal] = None
optionsShort: Optional[decimal.Decimal] = None
bonds: Optional[decimal.Decimal] = None
bondsLong: Optional[decimal.Decimal] = None
bondsShort: Optional[decimal.Decimal] = None
bondInterestAccrualsComponent: Optional[decimal.Decimal] = None
bondInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
bondInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
notes: Optional[decimal.Decimal] = None
notesLong: Optional[decimal.Decimal] = None
notesShort: Optional[decimal.Decimal] = None
interestAccruals: Optional[decimal.Decimal] = None
interestAccrualsLong: Optional[decimal.Decimal] = None
interestAccrualsShort: Optional[decimal.Decimal] = None
softDollars: Optional[decimal.Decimal] = None
softDollarsLong: Optional[decimal.Decimal] = None
softDollarsShort: Optional[decimal.Decimal] = None
dividendAccruals: Optional[decimal.Decimal] = None
dividendAccrualsLong: Optional[decimal.Decimal] = None
dividendAccrualsShort: Optional[decimal.Decimal] = None
total: Optional[decimal.Decimal] = None
totalLong: Optional[decimal.Decimal] = None
totalShort: Optional[decimal.Decimal] = None
commodities: Optional[decimal.Decimal] = None
commoditiesLong: Optional[decimal.Decimal] = None
commoditiesShort: Optional[decimal.Decimal] = None
funds: Optional[decimal.Decimal] = None
fundsLong: Optional[decimal.Decimal] = None
fundsShort: Optional[decimal.Decimal] = None
forexCfdUnrealizedPl: Optional[decimal.Decimal] = None
forexCfdUnrealizedPlLong: Optional[decimal.Decimal] = None
forexCfdUnrealizedPlShort: Optional[decimal.Decimal] = None
brokerInterestAccrualsComponent: Optional[decimal.Decimal] = None
brokerCashComponent: Optional[decimal.Decimal] = None
brokerFeesAccrualsComponent: Optional[decimal.Decimal] = None
brokerFeesAccrualsComponentLong: Optional[decimal.Decimal] = None
brokerFeesAccrualsComponentShort: Optional[decimal.Decimal] = None
cfdUnrealizedPl: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccount: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountLong: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountShort: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountCashComponent: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountCashComponentLong: Optional[decimal.Decimal] = None
fdicInsuredBankSweepAccountCashComponentShort: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccruals: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsLong: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsShort: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsComponent: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
fdicInsuredAccountInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
brokerCashComponentLong: Optional[decimal.Decimal] = None
brokerCashComponentShort: Optional[decimal.Decimal] = None
brokerInterestAccrualsComponentLong: Optional[decimal.Decimal] = None
brokerInterestAccrualsComponentShort: Optional[decimal.Decimal] = None
cfdUnrealizedPlLong: Optional[decimal.Decimal] = None
cfdUnrealizedPlShort: Optional[decimal.Decimal] = None
ipoSubscription: Optional[decimal.Decimal] = None
ipoSubscriptionLong: Optional[decimal.Decimal] = None
ipoSubscriptionShort: Optional[decimal.Decimal] = None
physDel: Optional[decimal.Decimal] = None
physDelLong: Optional[decimal.Decimal] = None
physDelShort: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class MTDYTDPerformanceSummaryUnderlying(FlexElement):
""" Wrapped in <MTDYTDPerformanceSummary> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
mtmMTD: Optional[decimal.Decimal] = None
mtmYTD: Optional[decimal.Decimal] = None
realSTMTD: Optional[decimal.Decimal] = None
realSTYTD: Optional[decimal.Decimal] = None
realLTMTD: Optional[decimal.Decimal] = None
realLTYTD: Optional[decimal.Decimal] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
realizedPnlMTD: Optional[decimal.Decimal] = None
realizedCapitalGainsPnlMTD: Optional[decimal.Decimal] = None
realizedFxPnlMTD: Optional[decimal.Decimal] = None
realizedPnlYTD: Optional[decimal.Decimal] = None
realizedCapitalGainsPnlYTD: Optional[decimal.Decimal] = None
realizedFxPnlYTD: Optional[decimal.Decimal] = None
brokerFees: Optional[decimal.Decimal] = None
brokerFeesSec: Optional[decimal.Decimal] = None
brokerFeesCom: Optional[decimal.Decimal] = None
brokerFeesMTD: Optional[decimal.Decimal] = None
brokerFeesYTD: Optional[decimal.Decimal] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class CashReportCurrency(FlexElement):
""" Wrapped in <CashReport> """
accountId: Optional[str] = None
currency: Optional[str] = None
fromDate: Optional[datetime.date] = None
toDate: Optional[datetime.date] = None
startingCash: Optional[decimal.Decimal] = None
startingCashSec: Optional[decimal.Decimal] = None
startingCashCom: Optional[decimal.Decimal] = None
clientFees: Optional[decimal.Decimal] = None
clientFeesSec: Optional[decimal.Decimal] = None
clientFeesCom: Optional[decimal.Decimal] = None
commissions: Optional[decimal.Decimal] = None
commissionsSec: Optional[decimal.Decimal] = None
commissionsCom: Optional[decimal.Decimal] = None
billableCommissions: Optional[decimal.Decimal] = None
billableCommissionsSec: Optional[decimal.Decimal] = None
billableCommissionsCom: Optional[decimal.Decimal] = None
depositWithdrawals: Optional[decimal.Decimal] = None
depositWithdrawalsSec: Optional[decimal.Decimal] = None
depositWithdrawalsCom: Optional[decimal.Decimal] = None
deposits: Optional[decimal.Decimal] = None
depositsSec: Optional[decimal.Decimal] = None
depositsCom: Optional[decimal.Decimal] = None
withdrawals: Optional[decimal.Decimal] = None
withdrawalsSec: Optional[decimal.Decimal] = None
withdrawalsCom: Optional[decimal.Decimal] = None
accountTransfers: Optional[decimal.Decimal] = None
accountTransfersSec: Optional[decimal.Decimal] = None
accountTransfersCom: Optional[decimal.Decimal] = None
internalTransfers: Optional[decimal.Decimal] = None
internalTransfersSec: Optional[decimal.Decimal] = None
internalTransfersCom: Optional[decimal.Decimal] = None
dividends: Optional[decimal.Decimal] = None
dividendsSec: Optional[decimal.Decimal] = None
dividendsCom: Optional[decimal.Decimal] = None
brokerFees: Optional[decimal.Decimal] = None
brokerFeesSec: Optional[decimal.Decimal] = None
brokerFeesCom: Optional[decimal.Decimal] = None
brokerFeesMTD: Optional[decimal.Decimal] = None
brokerFeesYTD: Optional[decimal.Decimal] = None
brokerInterest: Optional[decimal.Decimal] = None
brokerInterestSec: Optional[decimal.Decimal] = None
brokerInterestCom: Optional[decimal.Decimal] = None
bondInterest: Optional[decimal.Decimal] = None
bondInterestSec: Optional[decimal.Decimal] = None
bondInterestCom: Optional[decimal.Decimal] = None
cashSettlingMtm: Optional[decimal.Decimal] = None
cashSettlingMtmSec: Optional[decimal.Decimal] = None
cashSettlingMtmCom: Optional[decimal.Decimal] = None
cfdCharges: Optional[decimal.Decimal] = None
cfdChargesSec: Optional[decimal.Decimal] = None
cfdChargesCom: Optional[decimal.Decimal] = None
netTradesSales: Optional[decimal.Decimal] = None
netTradesSalesSec: Optional[decimal.Decimal] = None
netTradesSalesCom: Optional[decimal.Decimal] = None
netTradesPurchases: Optional[decimal.Decimal] = None
netTradesPurchasesSec: Optional[decimal.Decimal] = None
netTradesPurchasesCom: Optional[decimal.Decimal] = None
feesReceivables: Optional[decimal.Decimal] = None
feesReceivablesSec: Optional[decimal.Decimal] = None
feesReceivablesCom: Optional[decimal.Decimal] = None
paymentInLieu: Optional[decimal.Decimal] = None
paymentInLieuSec: Optional[decimal.Decimal] = None
paymentInLieuCom: Optional[decimal.Decimal] = None
transactionTax: Optional[decimal.Decimal] = None
transactionTaxSec: Optional[decimal.Decimal] = None
transactionTaxCom: Optional[decimal.Decimal] = None
withholdingTax: Optional[decimal.Decimal] = None
withholdingTaxSec: Optional[decimal.Decimal] = None
withholdingTaxCom: Optional[decimal.Decimal] = None
fxTranslationGainLoss: Optional[decimal.Decimal] = None
fxTranslationGainLossSec: Optional[decimal.Decimal] = None
fxTranslationGainLossCom: Optional[decimal.Decimal] = None
otherFees: Optional[decimal.Decimal] = None
otherFeesSec: Optional[decimal.Decimal] = None
otherFeesCom: Optional[decimal.Decimal] = None
endingCash: Optional[decimal.Decimal] = None
endingCashSec: Optional[decimal.Decimal] = None
endingCashCom: Optional[decimal.Decimal] = None
endingSettledCash: Optional[decimal.Decimal] = None
endingSettledCashSec: Optional[decimal.Decimal] = None
endingSettledCashCom: Optional[decimal.Decimal] = None
clientFeesMTD: Optional[decimal.Decimal] = None
clientFeesYTD: Optional[decimal.Decimal] = None
commissionsMTD: Optional[decimal.Decimal] = None
commissionsYTD: Optional[decimal.Decimal] = None
billableCommissionsMTD: Optional[decimal.Decimal] = None
billableCommissionsYTD: Optional[decimal.Decimal] = None
depositWithdrawalsMTD: Optional[decimal.Decimal] = None
depositWithdrawalsYTD: Optional[decimal.Decimal] = None
depositsMTD: Optional[decimal.Decimal] = None
depositsYTD: Optional[decimal.Decimal] = None
withdrawalsMTD: Optional[decimal.Decimal] = None
withdrawalsYTD: Optional[decimal.Decimal] = None
accountTransfersMTD: Optional[decimal.Decimal] = None
accountTransfersYTD: Optional[decimal.Decimal] = None
internalTransfersMTD: Optional[decimal.Decimal] = None
internalTransfersYTD: Optional[decimal.Decimal] = None
excessFundSweep: Optional[decimal.Decimal] = None
excessFundSweepSec: Optional[decimal.Decimal] = None
excessFundSweepCom: Optional[decimal.Decimal] = None
excessFundSweepMTD: Optional[decimal.Decimal] = None
excessFundSweepYTD: Optional[decimal.Decimal] = None
dividendsMTD: Optional[decimal.Decimal] = None
dividendsYTD: Optional[decimal.Decimal] = None
insuredDepositInterestMTD: Optional[decimal.Decimal] = None
insuredDepositInterestYTD: Optional[decimal.Decimal] = None
brokerInterestMTD: Optional[decimal.Decimal] = None
brokerInterestYTD: Optional[decimal.Decimal] = None
bondInterestMTD: Optional[decimal.Decimal] = None
bondInterestYTD: Optional[decimal.Decimal] = None
cashSettlingMtmMTD: Optional[decimal.Decimal] = None
cashSettlingMtmYTD: Optional[decimal.Decimal] = None
realizedVmMTD: Optional[decimal.Decimal] = None
realizedVmYTD: Optional[decimal.Decimal] = None
cfdChargesMTD: Optional[decimal.Decimal] = None
cfdChargesYTD: Optional[decimal.Decimal] = None
netTradesSalesMTD: Optional[decimal.Decimal] = None
netTradesSalesYTD: Optional[decimal.Decimal] = None
advisorFeesMTD: Optional[decimal.Decimal] = None
advisorFeesYTD: Optional[decimal.Decimal] = None
feesReceivablesMTD: Optional[decimal.Decimal] = None
feesReceivablesYTD: Optional[decimal.Decimal] = None
netTradesPurchasesMTD: Optional[decimal.Decimal] = None
netTradesPurchasesYTD: Optional[decimal.Decimal] = None
paymentInLieuMTD: Optional[decimal.Decimal] = None
paymentInLieuYTD: Optional[decimal.Decimal] = None
transactionTaxMTD: Optional[decimal.Decimal] = None
transactionTaxYTD: Optional[decimal.Decimal] = None
taxReceivablesMTD: Optional[decimal.Decimal] = None
taxReceivablesYTD: Optional[decimal.Decimal] = None
withholdingTaxMTD: Optional[decimal.Decimal] = None
withholdingTaxYTD: Optional[decimal.Decimal] = None
withholding871mMTD: Optional[decimal.Decimal] = None
withholding871mYTD: Optional[decimal.Decimal] = None
withholdingCollectedTaxMTD: Optional[decimal.Decimal] = None
withholdingCollectedTaxYTD: Optional[decimal.Decimal] = None
salesTaxMTD: Optional[decimal.Decimal] = None
salesTaxYTD: Optional[decimal.Decimal] = None
otherFeesMTD: Optional[decimal.Decimal] = None
otherFeesYTD: Optional[decimal.Decimal] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
avgCreditBalance: Optional[decimal.Decimal] = None
avgCreditBalanceSec: Optional[decimal.Decimal] = None
avgCreditBalanceCom: Optional[decimal.Decimal] = None
avgDebitBalance: Optional[decimal.Decimal] = None
avgDebitBalanceSec: Optional[decimal.Decimal] = None
avgDebitBalanceCom: Optional[decimal.Decimal] = None
linkingAdjustments: Optional[decimal.Decimal] = None
linkingAdjustmentsSec: Optional[decimal.Decimal] = None
linkingAdjustmentsCom: Optional[decimal.Decimal] = None
insuredDepositInterest: Optional[decimal.Decimal] = None
insuredDepositInterestSec: Optional[decimal.Decimal] = None
insuredDepositInterestCom: Optional[decimal.Decimal] = None
realizedVm: Optional[decimal.Decimal] = None
realizedVmSec: Optional[decimal.Decimal] = None
realizedVmCom: Optional[decimal.Decimal] = None
advisorFees: Optional[decimal.Decimal] = None
advisorFeesSec: Optional[decimal.Decimal] = None
advisorFeesCom: Optional[decimal.Decimal] = None
taxReceivables: Optional[decimal.Decimal] = None
taxReceivablesSec: Optional[decimal.Decimal] = None
taxReceivablesCom: Optional[decimal.Decimal] = None
withholding871m: Optional[decimal.Decimal] = None
withholding871mSec: Optional[decimal.Decimal] = None
withholding871mCom: Optional[decimal.Decimal] = None
withholdingCollectedTax: Optional[decimal.Decimal] = None
withholdingCollectedTaxSec: Optional[decimal.Decimal] = None
withholdingCollectedTaxCom: Optional[decimal.Decimal] = None
salesTax: Optional[decimal.Decimal] = None
salesTaxSec: Optional[decimal.Decimal] = None
salesTaxCom: Optional[decimal.Decimal] = None
other: Optional[decimal.Decimal] = None
otherSec: Optional[decimal.Decimal] = None
otherCom: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
debitCardActivity: Optional[decimal.Decimal] = None
debitCardActivitySec: Optional[decimal.Decimal] = None
debitCardActivityCom: Optional[decimal.Decimal] = None
debitCardActivityMTD: Optional[decimal.Decimal] = None
debitCardActivityYTD: Optional[decimal.Decimal] = None
billPay: Optional[decimal.Decimal] = None
billPaySec: Optional[decimal.Decimal] = None
billPayCom: Optional[decimal.Decimal] = None
billPayMTD: Optional[decimal.Decimal] = None
billPayYTD: Optional[decimal.Decimal] = None
realizedForexVm: Optional[decimal.Decimal] = None
realizedForexVmSec: Optional[decimal.Decimal] = None
realizedForexVmCom: Optional[decimal.Decimal] = None
realizedForexVmMTD: Optional[decimal.Decimal] = None
realizedForexVmYTD: Optional[decimal.Decimal] = None
ipoSubscription: Optional[decimal.Decimal] = None
ipoSubscriptionSec: Optional[decimal.Decimal] = None
ipoSubscriptionCom: Optional[decimal.Decimal] = None
ipoSubscriptionMTD: Optional[decimal.Decimal] = None
ipoSubscriptionYTD: Optional[decimal.Decimal] = None
billableSalesTax: Optional[decimal.Decimal] = None
billableSalesTaxSec: Optional[decimal.Decimal] = None
billableSalesTaxCom: Optional[decimal.Decimal] = None
billableSalesTaxMTD: Optional[decimal.Decimal] = None
billableSalesTaxYTD: Optional[decimal.Decimal] = None
commissionCreditsRedemption: Optional[decimal.Decimal] = None
commissionCreditsRedemptionSec: Optional[decimal.Decimal] = None
commissionCreditsRedemptionCom: Optional[decimal.Decimal] = None
commissionCreditsRedemptionMTD: Optional[decimal.Decimal] = None
commissionCreditsRedemptionYTD: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class StatementOfFundsLine(FlexElement):
""" Wrapped in <StmtFunds> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
balance: Optional[decimal.Decimal] = None
debit: Optional[decimal.Decimal] = None
credit: Optional[decimal.Decimal] = None
currency: Optional[str] = None
tradeID: Optional[str] = None
# Despite the name, `date` actually contains date/time data.
date: Optional[datetime.datetime] = None
reportDate: Optional[datetime.date] = None
activityDescription: Optional[str] = None
amount: Optional[decimal.Decimal] = None
buySell: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
fxRateToBase: Optional[decimal.Decimal] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
settleDate: Optional[datetime.date] = None
activityCode: Optional[str] = None # FIXME
orderID: Optional[str] = None
tradeQuantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeGross: Optional[decimal.Decimal] = None
tradeCommission: Optional[decimal.Decimal] = None
tradeTax: Optional[decimal.Decimal] = None
tradeCode: Optional[str] = None
levelOfDetail: Optional[str] = None
transactionID: Optional[str] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class ChangeInPositionValue(FlexElement):
""" Wrapped in <ChangeInPositionValues> """
assetCategory: Optional[enums.AssetClass] = None
currency: Optional[str] = None
priorPeriodValue: Optional[decimal.Decimal] = None
transactions: Optional[decimal.Decimal] = None
mtmPriorPeriodPositions: Optional[decimal.Decimal] = None
mtmTransactions: Optional[decimal.Decimal] = None
corporateActions: Optional[decimal.Decimal] = None
accountTransfers: Optional[decimal.Decimal] = None
fxTranslationPnl: Optional[decimal.Decimal] = None
futurePriceAdjustments: Optional[decimal.Decimal] = None
settledCash: Optional[decimal.Decimal] = None
endOfPeriodValue: Optional[decimal.Decimal] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
other: Optional[decimal.Decimal] = None
linkingAdjustments: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class OpenPosition(FlexElement):
""" Wrapped in <OpenPositions> """
side: Optional[enums.LongShort] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
position: Optional[decimal.Decimal] = None
markPrice: Optional[decimal.Decimal] = None
positionValue: Optional[decimal.Decimal] = None
openPrice: Optional[decimal.Decimal] = None
costBasisPrice: Optional[decimal.Decimal] = None
costBasisMoney: Optional[decimal.Decimal] = None
fifoPnlUnrealized: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
securityIDType: Optional[str] = None
issuer: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
originatingOrderID: Optional[str] = None
originatingTransactionID: Optional[str] = None
accruedInt: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
sedol: Optional[str] = None
percentOfNAV: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
positionValueInBase: Optional[decimal.Decimal] = None
unrealizedCapitalGainsPnl: Optional[decimal.Decimal] = None
    # NB: the stray "l" mirrors the attribute name as it appears in the
    # source XML, so the field is spelled to match.
    unrealizedlFxPnl: Optional[decimal.Decimal] = None
vestingDate: Optional[datetime.date] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class FxLot(FlexElement):
""" Wrapped in <FxLots>, which in turn is wrapped in <FxPositions> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
reportDate: Optional[datetime.date] = None
functionalCurrency: Optional[str] = None
fxCurrency: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
costPrice: Optional[decimal.Decimal] = None
costBasis: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
value: Optional[decimal.Decimal] = None
unrealizedPL: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
lotDescription: Optional[str] = None
lotOpenDateTime: Optional[datetime.datetime] = None
levelOfDetail: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
@dataclass(frozen=True)
class Trade(FlexElement):
""" Wrapped in <Trades> """
transactionType: Optional[enums.TradeType] = None
openCloseIndicator: Optional[enums.OpenClose] = None
buySell: Optional[enums.BuySell] = None
orderType: Optional[enums.OrderType] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
tradeID: Optional[str] = None
reportDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
tradeTime: Optional[datetime.time] = None
settleDateTarget: Optional[datetime.date] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeMoney: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
netCash: Optional[decimal.Decimal] = None
netCashInBase: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
notes: Tuple[enums.Code, ...] = () # separator = ";"
cost: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
origOrderID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
capitalGainsPnl: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
ibOrderID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
changeInPrice: Optional[decimal.Decimal] = None
changeInQuantity: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
clearingFirmID: Optional[str] = None
# Effective 2013, every Trade has a `transactionID` attribute that can't
# be deselected in the Flex query template.
transactionID: Optional[str] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
ibExecID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
exchOrderId: Optional[str] = None
extExecID: Optional[str] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
sedol: Optional[str] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
accruedInt: Optional[decimal.Decimal] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class Lot(FlexElement):
""" Wrapped in <Trades> """
transactionType: Optional[enums.TradeType] = None
openCloseIndicator: Optional[enums.OpenClose] = None
buySell: Optional[enums.BuySell] = None
orderType: Optional[enums.OrderType] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
tradeID: Optional[str] = None
reportDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
tradeTime: Optional[datetime.time] = None
settleDateTarget: Optional[datetime.date] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeMoney: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
netCash: Optional[decimal.Decimal] = None
netCashInBase: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
notes: Tuple[enums.Code, ...] = () # separator = ";"
cost: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
origOrderID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
capitalGainsPnl: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
ibOrderID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
changeInPrice: Optional[decimal.Decimal] = None
changeInQuantity: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
clearingFirmID: Optional[str] = None
# Effective 2013, every Trade has a `transactionID` attribute that can't
# be deselected in the Flex query template.
transactionID: Optional[str] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
ibExecID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
exchOrderId: Optional[str] = None
extExecID: Optional[str] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
sedol: Optional[str] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
@dataclass(frozen=True)
class UnbundledCommissionDetail(FlexElement):
""" Wrapped in <UnbundledCommissionDetails> """
buySell: Optional[enums.BuySell] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
price: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
orderReference: Optional[str] = None
totalCommission: Optional[decimal.Decimal] = None
brokerExecutionCharge: Optional[decimal.Decimal] = None
brokerClearingCharge: Optional[decimal.Decimal] = None
thirdPartyExecutionCharge: Optional[decimal.Decimal] = None
thirdPartyClearingCharge: Optional[decimal.Decimal] = None
thirdPartyRegulatoryCharge: Optional[decimal.Decimal] = None
regFINRATradingActivityFee: Optional[decimal.Decimal] = None
regSection31TransactionFee: Optional[decimal.Decimal] = None
regOther: Optional[decimal.Decimal] = None
other: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class SymbolSummary(FlexElement):
""" Wrapped in <TradeConfirms> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
transactionType: Optional[enums.TradeType] = None
tradeID: Optional[str] = None
orderID: Optional[decimal.Decimal] = None
execID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
clearingFirmID: Optional[str] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
dateTime: Optional[datetime.datetime] = None
reportDate: Optional[datetime.date] = None
settleDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
exchange: Optional[str] = None
buySell: Optional[enums.BuySell] = None
quantity: Optional[decimal.Decimal] = None
price: Optional[decimal.Decimal] = None
amount: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
commission: Optional[decimal.Decimal] = None
brokerExecutionCommission: Optional[decimal.Decimal] = None
brokerClearingCommission: Optional[decimal.Decimal] = None
thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
thirdPartyClearingCommission: Optional[decimal.Decimal] = None
thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
otherCommission: Optional[decimal.Decimal] = None
commissionCurrency: Optional[str] = None
tax: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
orderType: Optional[enums.OrderType] = None
levelOfDetail: Optional[str] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
allocatedTo: Optional[str] = None
accruedInt: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class Order(FlexElement):
""" Wrapped in <TradeConfirms> or <Trades>"""
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
transactionType: Optional[enums.TradeType] = None
tradeID: Optional[str] = None
orderID: Optional[decimal.Decimal] = None
execID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
clearingFirmID: Optional[str] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
dateTime: Optional[datetime.datetime] = None
reportDate: Optional[datetime.date] = None
settleDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
exchange: Optional[str] = None
buySell: Optional[enums.BuySell] = None
quantity: Optional[decimal.Decimal] = None
price: Optional[decimal.Decimal] = None
amount: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
commission: Optional[decimal.Decimal] = None
brokerExecutionCommission: Optional[decimal.Decimal] = None
brokerClearingCommission: Optional[decimal.Decimal] = None
thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
thirdPartyClearingCommission: Optional[decimal.Decimal] = None
thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
otherCommission: Optional[decimal.Decimal] = None
commissionCurrency: Optional[str] = None
tax: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
orderType: Optional[enums.OrderType] = None
levelOfDetail: Optional[str] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
allocatedTo: Optional[str] = None
accruedInt: Optional[decimal.Decimal] = None
netCash: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibOrderID: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
settleDateTarget: Optional[datetime.date] = None
tradeMoney: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
closePrice: Optional[decimal.Decimal] = None
openCloseIndicator: Optional[enums.OpenClose] = None
notes: Optional[str] = None
cost: Optional[decimal.Decimal] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
origOrderID: Optional[str] = None
transactionID: Optional[str] = None
ibExecID: Optional[str] = None
exchOrderId: Optional[str] = None
extExecID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
changeInPrice: Optional[decimal.Decimal] = None
changeInQuantity: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class TradeConfirm(FlexElement):
""" Wrapped in <TradeConfirms> """
transactionType: Optional[enums.TradeType] = None
openCloseIndicator: Optional[enums.OpenClose] = None
buySell: Optional[enums.BuySell] = None
orderType: Optional[enums.OrderType] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
reportDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
tradeTime: Optional[datetime.time] = None
settleDateTarget: Optional[datetime.date] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeMoney: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
netCash: Optional[decimal.Decimal] = None
closePrice: Optional[decimal.Decimal] = None
notes: Tuple[enums.Code, ...] = () # separator = ";"
cost: Optional[decimal.Decimal] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
origTradePrice: Optional[decimal.Decimal] = None
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
origOrderID: Optional[str] = None
clearingFirmID: Optional[str] = None
transactionID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
levelOfDetail: Optional[str] = None
commissionCurrency: Optional[str] = None
price: Optional[decimal.Decimal] = None
thirdPartyClearingCommission: Optional[decimal.Decimal] = None
orderID: Optional[decimal.Decimal] = None
allocatedTo: Optional[str] = None
thirdPartyRegulatoryCommission: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
brokerExecutionCommission: Optional[decimal.Decimal] = None
thirdPartyExecutionCommission: Optional[decimal.Decimal] = None
amount: Optional[decimal.Decimal] = None
otherCommission: Optional[decimal.Decimal] = None
commission: Optional[decimal.Decimal] = None
brokerClearingCommission: Optional[decimal.Decimal] = None
ibOrderID: Optional[str] = None
ibExecID: Optional[str] = None
execID: Optional[str] = None
brokerageOrderID: Optional[str] = None
orderReference: Optional[str] = None
volatilityOrderLink: Optional[str] = None
exchOrderId: Optional[str] = None
extExecID: Optional[str] = None
# Despite the name, `orderTime` actually contains date/time data.
orderTime: Optional[datetime.datetime] = None
changeInPrice: Optional[decimal.Decimal] = None
changeInQuantity: Optional[decimal.Decimal] = None
traderID: Optional[str] = None
isAPIOrder: Optional[bool] = None
code: Tuple[enums.Code, ...] = ()
tax: Optional[decimal.Decimal] = None
listingExchange: Optional[str] = None
underlyingListingExchange: Optional[str] = None
settleDate: Optional[datetime.date] = None
underlyingSecurityID: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
accruedInt: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class OptionEAE(FlexElement):
"""Option Exercise Assignment or Expiration
Wrapped in (identically-named) <OptionEAE>
"""
transactionType: Optional[enums.OptionAction] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.date] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
markPrice: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
    # NB: the misspelling mirrors the attribute name in the source XML.
    commisionsAndTax: Optional[decimal.Decimal] = None
costBasis: Optional[decimal.Decimal] = None
realizedPnl: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
# Type alias to work around https://github.com/python/mypy/issues/1775
_OptionEAE = OptionEAE
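# The alias exists (presumably) so that a container dataclass can declare a
# field named after this class without the field shadowing the type in its
# own annotation; a hypothetical sketch of such a declaration:
#
#     optionEAE: Tuple["_OptionEAE", ...] = ()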
@dataclass(frozen=True)
class TradeTransfer(FlexElement):
""" Wrapped in <TradeTransfers> """
transactionType: Optional[enums.TradeType] = None
openCloseIndicator: Optional[enums.OpenClose] = None
direction: Optional[enums.ToFrom] = None
deliveredReceived: Optional[enums.DeliveredReceived] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
tradeID: Optional[str] = None
reportDate: Optional[datetime.date] = None
tradeDate: Optional[datetime.date] = None
tradeTime: Optional[datetime.time] = None
settleDateTarget: Optional[datetime.date] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeMoney: Optional[decimal.Decimal] = None
taxes: Optional[decimal.Decimal] = None
ibCommission: Optional[decimal.Decimal] = None
ibCommissionCurrency: Optional[str] = None
closePrice: Optional[decimal.Decimal] = None
notes: Tuple[enums.Code, ...] = () # separator = ";"
cost: Optional[decimal.Decimal] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
brokerName: Optional[str] = None
brokerAccount: Optional[str] = None
awayBrokerCommission: Optional[decimal.Decimal] = None
regulatoryFee: Optional[decimal.Decimal] = None
netTradeMoney: Optional[decimal.Decimal] = None
netTradeMoneyInBase: Optional[decimal.Decimal] = None
netTradePrice: Optional[decimal.Decimal] = None
multiplier: Optional[decimal.Decimal] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
sedol: Optional[str] = None
securityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
proceeds: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
netCash: Optional[decimal.Decimal] = None
origTradePrice: Optional[decimal.Decimal] = None
# Oddly, `origTradeDate` appears to have hard-coded YYYYMMDD format
# instead of the date format from the report configuration.
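    # (Hypothetical illustration: origTradeDate="20130215" even when the
    # report configuration formats other dates as "yyyy-MM-dd".)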
origTradeDate: Optional[datetime.date] = None
origTradeID: Optional[str] = None
origOrderID: Optional[str] = None
clearingFirmID: Optional[str] = None
transactionID: Optional[str] = None
openDateTime: Optional[datetime.datetime] = None
holdingPeriodDateTime: Optional[datetime.datetime] = None
whenRealized: Optional[datetime.datetime] = None
whenReopened: Optional[datetime.datetime] = None
levelOfDetail: Optional[str] = None
securityIDType: Optional[str] = None
@dataclass(frozen=True)
class InterestAccrualsCurrency(FlexElement):
""" Wrapped in <InterestAccruals> """
accountId: Optional[str] = None
currency: Optional[str] = None
fromDate: Optional[datetime.date] = None
toDate: Optional[datetime.date] = None
startingAccrualBalance: Optional[decimal.Decimal] = None
interestAccrued: Optional[decimal.Decimal] = None
accrualReversal: Optional[decimal.Decimal] = None
endingAccrualBalance: Optional[decimal.Decimal] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
fxTranslation: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class TierInterestDetail(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
interestType: Optional[str] = None
valueDate: Optional[datetime.date] = None
tierBreak: Optional[str] = None
balanceThreshold: Optional[decimal.Decimal] = None
securitiesPrincipal: Optional[decimal.Decimal] = None
commoditiesPrincipal: Optional[decimal.Decimal] = None
ibuklPrincipal: Optional[decimal.Decimal] = None
totalPrincipal: Optional[decimal.Decimal] = None
rate: Optional[decimal.Decimal] = None
securitiesInterest: Optional[decimal.Decimal] = None
commoditiesInterest: Optional[decimal.Decimal] = None
ibuklInterest: Optional[decimal.Decimal] = None
totalInterest: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
@dataclass(frozen=True)
class HardToBorrowDetail(FlexElement):
""" Wrapped in <HardToBorrowDetails> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
valueDate: Optional[datetime.date] = None
quantity: Optional[decimal.Decimal] = None
price: Optional[decimal.Decimal] = None
value: Optional[decimal.Decimal] = None
borrowFeeRate: Optional[decimal.Decimal] = None
borrowFee: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
@dataclass(frozen=True)
class SLBActivity(FlexElement):
""" Wrapped in <SLBActivities> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.date] = None
slbTransactionId: Optional[str] = None
activityDescription: Optional[str] = None
type: Optional[str] = None
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
feeRate: Optional[decimal.Decimal] = None
collateralAmount: Optional[decimal.Decimal] = None
markQuantity: Optional[decimal.Decimal] = None
markPriorPrice: Optional[decimal.Decimal] = None
markCurrentPrice: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class SLBFee(FlexElement):
""" Wrapped in <SLBFees> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[str] = None
assetCategory: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
valueDate: Optional[datetime.date] = None
startDate: Optional[datetime.date] = None
type: Optional[str] = None # FIXME
exchange: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
collateralAmount: Optional[decimal.Decimal] = None
feeRate: Optional[decimal.Decimal] = None
fee: Optional[decimal.Decimal] = None
carryCharge: Optional[decimal.Decimal] = None
ticketCharge: Optional[decimal.Decimal] = None
totalCharges: Optional[decimal.Decimal] = None
marketFeeRate: Optional[decimal.Decimal] = None
grossLendFee: Optional[decimal.Decimal] = None
netLendFeeRate: Optional[decimal.Decimal] = None
netLendFee: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
@dataclass(frozen=True)
class Transfer(FlexElement):
""" Wrapped in <Transfers> """
type: Optional[enums.TransferType] = None
direction: Optional[enums.InOut] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
reportDate: Optional[datetime.date] = None
underlyingConid: Optional[str] = None
date: Optional[datetime.date] = None
dateTime: Optional[datetime.datetime] = None
account: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
transferPrice: Optional[decimal.Decimal] = None
positionAmount: Optional[decimal.Decimal] = None
positionAmountInBase: Optional[decimal.Decimal] = None
capitalGainsPnl: Optional[decimal.Decimal] = None
cashTransfer: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
clientReference: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
company: Optional[str] = None
accountName: Optional[str] = None
pnlAmount: Optional[decimal.Decimal] = None
pnlAmountInBase: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
transactionID: Optional[str] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class UnsettledTransfer(FlexElement):
""" Wrapped in <UnsettledTransfers> """
direction: Optional[enums.ToFrom] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
underlyingConid: Optional[str] = None
stage: Optional[str] = None
tradeDate: Optional[datetime.date] = None
targetSettlement: Optional[datetime.date] = None
contra: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
tradePrice: Optional[decimal.Decimal] = None
tradeAmount: Optional[decimal.Decimal] = None
tradeAmountInBase: Optional[decimal.Decimal] = None
transactionID: Optional[str] = None
@dataclass(frozen=True)
class PriorPeriodPosition(FlexElement):
""" Wrapped in <PriorPeriodPositions> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
priorMtmPnl: Optional[decimal.Decimal] = None
date: Optional[datetime.date] = None
price: Optional[decimal.Decimal] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class CorporateAction(FlexElement):
""" Wrapped in <CorporateActions> """
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
actionDescription: Optional[str] = None
dateTime: Optional[datetime.datetime] = None
amount: Optional[decimal.Decimal] = None
quantity: Optional[decimal.Decimal] = None
fifoPnlRealized: Optional[decimal.Decimal] = None
capitalGainsPnl: Optional[decimal.Decimal] = None
fxPnl: Optional[decimal.Decimal] = None
mtmPnl: Optional[decimal.Decimal] = None
# Effective 2010, CorporateAction has a `type` attribute
type: Optional[enums.Reorg] = None
code: Tuple[enums.Code, ...] = ()
sedol: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
proceeds: Optional[decimal.Decimal] = None
value: Optional[decimal.Decimal] = None
transactionID: Optional[str] = None
@dataclass(frozen=True)
class CashTransaction(FlexElement):
""" Wrapped in <CashTransactions> """
type: Optional[enums.CashAction] = None
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
amount: Optional[decimal.Decimal] = None
dateTime: Optional[datetime.datetime] = None
sedol: Optional[str] = None
symbol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
transactionID: Optional[str] = None
reportDate: Optional[datetime.date] = None
clientReference: Optional[str] = None
settleDate: Optional[datetime.date] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
levelOfDetail: Optional[str] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class DebitCardActivity(FlexElement):
""" Wrapped in <DebitCardActivities> """
accountId: Optional[str] = None
acctAlias: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
assetCategory: Optional[enums.AssetClass] = None
status: Optional[str] = None
reportDate: Optional[datetime.date] = None
postingDate: Optional[datetime.date] = None
transactionDateTime: Optional[datetime.datetime] = None
category: Optional[str] = None
merchantNameLocation: Optional[str] = None
amount: Optional[decimal.Decimal] = None
model: Optional[str] = None
@dataclass(frozen=True)
class ChangeInDividendAccrual(FlexElement):
""" Wrapped in <ChangeInDividendAccruals> """
date: Optional[datetime.date] = None
assetCategory: Optional[enums.AssetClass] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
accountId: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
reportDate: Optional[datetime.date] = None
underlyingConid: Optional[str] = None
exDate: Optional[datetime.date] = None
payDate: Optional[datetime.date] = None
quantity: Optional[decimal.Decimal] = None
tax: Optional[decimal.Decimal] = None
fee: Optional[decimal.Decimal] = None
grossRate: Optional[decimal.Decimal] = None
grossAmount: Optional[decimal.Decimal] = None
netAmount: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
# Type alias to work around https://github.com/python/mypy/issues/1775
_ChangeInDividendAccrual = ChangeInDividendAccrual
@dataclass(frozen=True)
class OpenDividendAccrual(FlexElement):
""" Wrapped in <OpenDividendAccruals> """
assetCategory: Optional[enums.AssetClass] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
accountId: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
exDate: Optional[datetime.date] = None
payDate: Optional[datetime.date] = None
quantity: Optional[decimal.Decimal] = None
tax: Optional[decimal.Decimal] = None
fee: Optional[decimal.Decimal] = None
grossRate: Optional[decimal.Decimal] = None
grossAmount: Optional[decimal.Decimal] = None
netAmount: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
fromAcct: Optional[str] = None
toAcct: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class SecurityInfo(FlexElement):
""" Wrapped in <SecuritiesInfo> """
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingCategory: Optional[str] = None
subCategory: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
maturity: Optional[str] = None
issueDate: Optional[datetime.date] = None
type: Optional[str] = None
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
currency: Optional[str] = None
settlementPolicyMethod: Optional[str] = None
@dataclass(frozen=True)
class ConversionRate(FlexElement):
""" Wrapped in <ConversionRates> """
reportDate: Optional[datetime.date] = None
fromCurrency: Optional[str] = None
toCurrency: Optional[str] = None
rate: Optional[decimal.Decimal] = None
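# As a rough illustration (attribute values invented), an element such as
#     <ConversionRate reportDate="2020-12-31" fromCurrency="EUR"
#                     toCurrency="USD" rate="1.2271" />
# would map onto:
#     ConversionRate(reportDate=datetime.date(2020, 12, 31),
#                    fromCurrency="EUR", toCurrency="USD",
#                    rate=decimal.Decimal("1.2271"))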
@dataclass(frozen=True)
class FIFOPerformanceSummaryUnderlying(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
listingExchange: Optional[str] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
realizedSTProfit: Optional[decimal.Decimal] = None
realizedSTLoss: Optional[decimal.Decimal] = None
realizedLTProfit: Optional[decimal.Decimal] = None
realizedLTLoss: Optional[decimal.Decimal] = None
totalRealizedPnl: Optional[decimal.Decimal] = None
unrealizedProfit: Optional[decimal.Decimal] = None
unrealizedLoss: Optional[decimal.Decimal] = None
totalUnrealizedPnl: Optional[decimal.Decimal] = None
totalFifoPnl: Optional[decimal.Decimal] = None
totalRealizedCapitalGainsPnl: Optional[decimal.Decimal] = None
totalRealizedFxPnl: Optional[decimal.Decimal] = None
totalUnrealizedCapitalGainsPnl: Optional[decimal.Decimal] = None
totalUnrealizedFxPnl: Optional[decimal.Decimal] = None
totalCapitalGainsPnl: Optional[decimal.Decimal] = None
totalFxPnl: Optional[decimal.Decimal] = None
transferredPnl: Optional[decimal.Decimal] = None
transferredCapitalGainsPnl: Optional[decimal.Decimal] = None
transferredFxPnl: Optional[decimal.Decimal] = None
sedol: Optional[str] = None
securityIDType: Optional[str] = None
underlyingSymbol: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
unrealizedSTProfit: Optional[decimal.Decimal] = None
unrealizedSTLoss: Optional[decimal.Decimal] = None
unrealizedLTProfit: Optional[decimal.Decimal] = None
unrealizedLTLoss: Optional[decimal.Decimal] = None
costAdj: Optional[decimal.Decimal] = None
code: Tuple[enums.Code, ...] = ()
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class NetStockPosition(FlexElement):
assetCategory: Optional[enums.AssetClass] = None
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
sedol: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
sharesAtIb: Optional[decimal.Decimal] = None
sharesBorrowed: Optional[decimal.Decimal] = None
sharesLent: Optional[decimal.Decimal] = None
netShares: Optional[decimal.Decimal] = None
serialNumber: Optional[str] = None
deliveryType: Optional[str] = None
commodityType: Optional[str] = None
fineness: Optional[decimal.Decimal] = None
weight: Optional[str] = None
@dataclass(frozen=True)
class ClientFee(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
feeType: Optional[str] = None
date: Optional[datetime.datetime] = None
description: Optional[str] = None
expenseIndicator: Optional[str] = None
revenue: Optional[decimal.Decimal] = None
expense: Optional[decimal.Decimal] = None
net: Optional[decimal.Decimal] = None
revenueInBase: Optional[decimal.Decimal] = None
expenseInBase: Optional[decimal.Decimal] = None
netInBase: Optional[decimal.Decimal] = None
tradeID: Optional[str] = None
execID: Optional[str] = None
levelOfDetail: Optional[str] = None
@dataclass(frozen=True)
class ClientFeesDetail(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
date: Optional[datetime.datetime] = None
tradeID: Optional[str] = None
execID: Optional[str] = None
totalRevenue: Optional[decimal.Decimal] = None
totalCommission: Optional[decimal.Decimal] = None
brokerExecutionCharge: Optional[decimal.Decimal] = None
clearingCharge: Optional[decimal.Decimal] = None
thirdPartyExecutionCharge: Optional[decimal.Decimal] = None
thirdPartyRegulatoryCharge: Optional[decimal.Decimal] = None
regFINRATradingActivityFee: Optional[decimal.Decimal] = None
regSection31TransactionFee: Optional[decimal.Decimal] = None
regOther: Optional[decimal.Decimal] = None
totalNet: Optional[decimal.Decimal] = None
totalNetInBase: Optional[decimal.Decimal] = None
levelOfDetail: Optional[str] = None
other: Optional[decimal.Decimal] = None
@dataclass(frozen=True)
class TransactionTax(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.datetime] = None
taxDescription: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
taxAmount: Optional[decimal.Decimal] = None
tradeId: Optional[str] = None
tradePrice: Optional[decimal.Decimal] = None
source: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
levelOfDetail: Optional[str] = None
@dataclass(frozen=True)
class TransactionTaxDetail(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.datetime] = None
taxDescription: Optional[str] = None
quantity: Optional[decimal.Decimal] = None
reportDate: Optional[datetime.date] = None
taxAmount: Optional[decimal.Decimal] = None
tradeId: Optional[str] = None
tradePrice: Optional[decimal.Decimal] = None
source: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
levelOfDetail: Optional[str] = None
@dataclass(frozen=True)
class SalesTax(FlexElement):
accountId: Optional[str] = None
acctAlias: Optional[str] = None
model: Optional[str] = None
currency: Optional[str] = None
fxRateToBase: Optional[decimal.Decimal] = None
assetCategory: Optional[enums.AssetClass] = None
symbol: Optional[str] = None
description: Optional[str] = None
conid: Optional[str] = None
securityID: Optional[str] = None
securityIDType: Optional[str] = None
cusip: Optional[str] = None
isin: Optional[str] = None
listingExchange: Optional[str] = None
underlyingConid: Optional[str] = None
underlyingSecurityID: Optional[str] = None
underlyingSymbol: Optional[str] = None
underlyingListingExchange: Optional[str] = None
issuer: Optional[str] = None
multiplier: Optional[decimal.Decimal] = None
strike: Optional[decimal.Decimal] = None
expiry: Optional[datetime.date] = None
putCall: Optional[enums.PutCall] = None
principalAdjustFactor: Optional[decimal.Decimal] = None
date: Optional[datetime.date] = None
country: Optional[str] = None
taxType: Optional[str] = None
payer: Optional[str] = None
taxableDescription: Optional[str] = None
taxableAmount: Optional[decimal.Decimal] = None
taxRate: Optional[decimal.Decimal] = None
salesTax: Optional[decimal.Decimal] = None
taxableTransactionID: Optional[str] = None
transactionID: Optional[str] = None
code: Tuple[enums.Code, ...] = ()
# Type alias to work around https://github.com/python/mypy/issues/1775
_ClientFeesDetail = ClientFeesDetail
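# Illustrative sketch (an assumption, not part of the original module): the
# alias gives later field annotations a second name for the class, e.g. a
# hypothetical container field such as
#
#     clientFeesDetails: Tuple[_ClientFeesDetail, ...] = ()
#
# where annotating with the class name directly would trip the mypy issue
# linked above.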
| [def_use_chains: nested lists of [start, end] character-offset pairs locating each name's definition and uses in the code above — numeric payload elided] |
import os
import aiofiles
import webbrowser
import json as stdlib_json
from sanic import Sanic, response
from sanic.exceptions import abort
from sanic.response import json
from pyfy import AsyncSpotify, ClientCreds, AuthError
try:
from spt_keys import KEYS
except ImportError:
from spt_keys_template import KEYS
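# Assumed shape of spt_keys.py / spt_keys_template.py (the key names here are
# illustrative guesses; export_keys() below simply copies this mapping into
# environment variables for ClientCreds.load_from_env() to pick up):
#
#     KEYS = {
#         "SPOTIFY_CLIENT_ID": "<client id>",
#         "SPOTIFY_CLIENT_SECRET": "<client secret>",
#         "SPOTIFY_REDIRECT_URI": "http://localhost:5000/callback/spotify",
#     }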
app = Sanic(__name__)
local_address = "localhost"
local_port = "5000"
local_full_address = local_address + ":" + str(local_port)
spt = AsyncSpotify()
client = ClientCreds()
state = "123"
@app.route("/authorize")
def authorize(request):
export_keys()
client.load_from_env()
spt.client_creds = client
if spt.is_oauth_ready:
return response.redirect(spt.auth_uri(state=state))
else:
return (
json(
{
"error_description": "Client needs client_id, client_secret and a redirect uri in order to handle OAauth properly"
}
),
500,
)
@app.route("/callback/spotify") # You have to register this callback
async def spotify_callback(request):
if request.args.get("error"):
return json(dict(error=request.args.get("error_description")))
elif request.args.get("code"):
grant = request.args.get("code")
callback_state = request.args.get("state")
if callback_state != state:
abort(401)
try:
user_creds = await spt.build_user_creds(grant=grant)
async with aiofiles.open(os.path.join(os.getcwd(), "SPOTIFY_CREDS.json"), "w") as file:  # join with a separator so the file lands inside the cwd
await file.write(stdlib_json.dumps(user_creds.__dict__))
except AuthError as e:
return json(dict(error_description=e.msg, error_code=e.code), e.code)
else:
await spt.populate_user_creds()
print(os.getcwd())
return await response.file(os.path.join(os.getcwd(), "SPOTIFY_CREDS.json"))
# return response.json(dict(user_creds=user_creds.__dict__, check_if_active=app.url_for('is_active', _scheme='http', _external=True, _server=local_full_address)), 200)
else:
return response.text("Something is wrong with your callback")
@app.route("/is_active")
async def is_active(request):
return json(
dict(
is_active=await spt.is_active,
your_tracks=app.url_for(
"tracks", _scheme="http", _external=True, _server=local_full_address
),
your_playlists=app.url_for(
"playlists", _scheme="http", _external=True, _server=local_full_address
),
)
)
@app.route("/dump_creds")
def dump_creds(request):
# TODO: save both client and user creds and send them to the user as JSON files to download
return response.text("Not Implemented")
@app.route("/")
def index(request):
return response.text("OK")
@app.route("/tracks")
async def tracks(request):
return json(await spt.user_tracks())
@app.route("/playlists")
async def playlists(request):
return json(await spt.user_playlists())
def export_keys():
for k, v in KEYS.items():
if v:
os.environ[k] = v
print("export " + k + "=" + v)
if __name__ == "__main__":
webbrowser.open_new_tab("http://" + local_full_address + "/authorize")
app.run(host=local_address, port=int(local_port), debug=True)
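# Rough flow (a sketch of what this demo does, inferred from the code above):
# running the module opens http://localhost:5000/authorize in a browser, which
# loads the keys via export_keys() and redirects to Spotify's consent page;
# Spotify then calls /callback/spotify, where the "code" grant is exchanged
# for user credentials and written to SPOTIFY_CREDS.json under the current
# working directory.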
| [
[
[
7,
9
],
[
1502,
1504
],
[
1814,
1816
],
[
1866,
1868
],
[
3112,
3114
]
],
[
[
17,
25
],
[
1488,
1496
]
],
[
[
33,
43
],
[
3206,
3216
]
],
[
[
51,
70
],
[
1585,
1596
]
],
[
[
90,
95
],
[
333,
338
]
],
[
[
97,
105
],
[
684,
692
],
[
1852,
1860
],
[
2107,
2115
],
[
2740,
2748
],
[
2822,
2830
]
],
[
[
135,
140
],
[
1376,
1381
]
],
[
[
168,
172
],
[
768,
772
],
[
1145,
1149
],
[
1675,
1679
],
[
2230,
2234
],
[
2904,
2908
],
[
3002,
3006
]
],
[
[
191,
203
],
[
464,
476
]
],
[
[
205,
216
],
[
488,
499
]
],
[
[
218,
227
],
[
1640,
1649
]
],
[
[
259,
263
],
[
3072,
3076
]
],
[
[
320,
324
],
[
3072,
3076
]
],
[
[
327,
330
],
[
519,
522
],
[
990,
993
],
[
2165,
2168
],
[
2592,
2595
],
[
2776,
2779
],
[
2845,
2848
],
[
2937,
2940
],
[
3281,
3284
],
[
2317,
2320
],
[
2457,
2460
]
],
[
[
350,
363
],
[
419,
432
],
[
3294,
3307
]
],
[
[
378,
388
],
[
445,
455
],
[
3318,
3328
]
],
[
[
398,
416
],
[
3242,
3260
],
[
2396,
2414
],
[
2539,
2557
]
],
[
[
458,
461
],
[
616,
619
],
[
649,
652
],
[
702,
705
],
[
1431,
1434
],
[
1770,
1773
],
[
2278,
2281
],
[
2915,
2918
],
[
3013,
3016
]
],
[
[
479,
485
],
[
589,
595
],
[
635,
641
]
],
[
[
502,
507
],
[
721,
726
],
[
1357,
1362
]
],
[
[
547,
556
]
],
[
[
1059,
2161
]
],
[
[
2189,
2588
]
],
[
[
2621,
2631
]
],
[
[
2795,
2800
]
],
[
[
2866,
2933
]
],
[
[
2961,
3034
]
],
[
[
3041,
3052
],
[
571,
582
]
]
] |
"""
This test will initialize the display using displayio and draw a solid green
background, a smaller purple rectangle, and some yellow text. All drawing is done
using native displayio modules.
Pinouts are for the 2.4" TFT FeatherWing or Breakout with a Feather M4 or M0.
"""
import board
import terminalio
import displayio
from adafruit_display_text import label
import adafruit_ili9341
# Release any resources currently in use for the displays
displayio.release_displays()
spi = board.SPI()
tft_cs = board.D9
tft_dc = board.D10
display_bus = displayio.FourWire(
spi, command=tft_dc, chip_select=tft_cs, reset=board.D6
)
display = adafruit_ili9341.ILI9341(display_bus, width=320, height=240)
# Make the display context
splash = displayio.Group(max_size=10)
display.show(splash)
# Draw a green background
color_bitmap = displayio.Bitmap(320, 240, 1)
color_palette = displayio.Palette(1)
color_palette[0] = 0x00FF00 # Bright Green
bg_sprite = displayio.TileGrid(color_bitmap, pixel_shader=color_palette, x=0, y=0)
splash.append(bg_sprite)
# Draw a smaller inner rectangle
inner_bitmap = displayio.Bitmap(280, 200, 1)
inner_palette = displayio.Palette(1)
inner_palette[0] = 0xAA0088 # Purple
inner_sprite = displayio.TileGrid(inner_bitmap, pixel_shader=inner_palette, x=20, y=20)
splash.append(inner_sprite)
# Draw a label
text_group = displayio.Group(max_size=10, scale=3, x=57, y=120)
text = "Hello World!"
text_area = label.Label(terminalio.FONT, text=text, color=0xFFFF00)
text_group.append(text_area) # Subgroup for text scaling
splash.append(text_group)
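# Keep the script running so the drawn scene stays on the display.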
while True:
pass
| [
[
[
292,
297
],
[
501,
506
],
[
523,
528
],
[
542,
547
],
[
641,
646
]
],
[
[
306,
316
],
[
1496,
1506
]
],
[
[
325,
334
],
[
463,
472
],
[
569,
578
],
[
765,
774
],
[
861,
870
],
[
908,
917
],
[
989,
998
],
[
1140,
1149
],
[
1187,
1196
],
[
1263,
1272
],
[
1397,
1406
]
],
[
[
370,
375
],
[
1484,
1489
]
],
[
[
384,
400
],
[
664,
680
]
],
[
[
495,
498
],
[
594,
597
]
],
[
[
514,
520
],
[
627,
633
]
],
[
[
533,
539
],
[
607,
613
]
],
[
[
555,
566
],
[
689,
700
]
],
[
[
654,
661
],
[
795,
802
]
],
[
[
756,
762
],
[
808,
814
],
[
1063,
1069
],
[
1337,
1343
],
[
1600,
1606
]
],
[
[
846,
858
],
[
1008,
1020
]
],
[
[
892,
905
],
[
930,
943
],
[
1035,
1048
]
],
[
[
977,
986
],
[
1077,
1086
]
],
[
[
1125,
1137
],
[
1282,
1294
]
],
[
[
1171,
1184
],
[
1209,
1222
],
[
1309,
1322
]
],
[
[
1248,
1260
],
[
1351,
1363
]
],
[
[
1384,
1394
],
[
1541,
1551
],
[
1614,
1624
]
],
[
[
1449,
1453
],
[
1518,
1522
]
],
[
[
1472,
1481
],
[
1559,
1568
]
]
] |
###############################################################################
##
## Copyright (c) Crossbar.io Technologies GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from case import Case
class Case1_2_5(Case):
DESCRIPTION = """Send binary message message with payload of length 128."""
EXPECTATION = """Receive echo'ed binary message (with payload as sent). Clean close with normal code."""
def onOpen(self):
payload = "\xfe" * 128
self.expected[Case.OK] = [("message", payload, True)]
self.expectedClose = {"closedByMe":True,"closeCode":[self.p.CLOSE_STATUS_CODE_NORMAL],"requireClean":True}
self.p.sendFrame(opcode = 2, payload = payload)
self.p.killAfter(1)
| [
[
[
817,
821
],
[
841,
845
],
[
1116,
1120
]
],
[
[
831,
840
]
]
] |
a = int(input())
for i in range(1,11):
    total = i * a
    print('{} x {} = {}'.format(i, a, total))
| [
[
[
0,
1
],
[
54,
55
],
[
91,
92
]
],
[
[
22,
23
],
[
52,
53
],
[
88,
89
]
],
[
[
44,
49
],
[
93,
98
]
]
] |
import copy
import torch.nn as nn
from rlkit.launchers.launcher_util import setup_logger
import rlkit.torch.pytorch_util as ptu
from rlkit.core.ma_eval_util import get_generic_ma_path_information
def experiment(variant):
num_agent = variant['num_agent']
from differential_game import DifferentialGame
expl_env = DifferentialGame(game_name=args.exp_name)
eval_env = DifferentialGame(game_name=args.exp_name)
obs_dim = eval_env.observation_space.low.size
action_dim = eval_env.action_space.low.size
from rlkit.torch.networks.graph_builders import FullGraphBuilder
graph_builder1 = FullGraphBuilder(
input_node_dim=obs_dim+action_dim,
num_node=num_agent,
contain_self_loop=False)
from rlkit.torch.networks.gnn_networks import GNNNet
gnn1 = GNNNet(
graph_builder1,
node_dim=variant['qf_kwargs']['hidden_dim'],
conv_type=variant['qf_kwargs']['conv_type'],
num_conv_layers=1,
hidden_activation='relu',
output_activation='relu',
)
qf1 = nn.Sequential(
gnn1,
nn.Linear(variant['qf_kwargs']['hidden_dim'],1)
)
target_qf1 = copy.deepcopy(qf1)
from rlkit.torch.networks.graph_builders import FullGraphBuilder
graph_builder2 = FullGraphBuilder(
input_node_dim=obs_dim+action_dim,
num_node=num_agent,
contain_self_loop=False)
from rlkit.torch.networks.gnn_networks import GNNNet
gnn2 = GNNNet(
graph_builder2,
node_dim=variant['qf_kwargs']['hidden_dim'],
conv_type=variant['qf_kwargs']['conv_type'],
num_conv_layers=1,
hidden_activation='relu',
output_activation='relu',
)
qf2 = nn.Sequential(
gnn2,
nn.Linear(variant['qf_kwargs']['hidden_dim'],1)
)
target_qf2 = copy.deepcopy(qf2)
policy_n, eval_policy_n, expl_policy_n = [], [], []
for i in range(num_agent):
from rlkit.torch.networks.layers import SplitLayer
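        # SplitLayer below yields two parallel heads (presumably the mean and
        # log-std of the action distribution) that the TanhGaussianPolicy
        # wrapper further down consumes.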
policy = nn.Sequential(
nn.Linear(obs_dim,variant['policy_kwargs']['hidden_dim']),
nn.ReLU(),
nn.Linear(variant['policy_kwargs']['hidden_dim'],variant['policy_kwargs']['hidden_dim']),
nn.ReLU(),
SplitLayer(layers=[nn.Linear(variant['policy_kwargs']['hidden_dim'],action_dim),
nn.Linear(variant['policy_kwargs']['hidden_dim'],action_dim)])
)
from rlkit.torch.policies.tanh_gaussian_policy import TanhGaussianPolicy
policy = TanhGaussianPolicy(module=policy)
from rlkit.torch.policies.make_deterministic import MakeDeterministic
eval_policy = MakeDeterministic(policy)
from rlkit.exploration_strategies.base import PolicyWrappedWithExplorationStrategy
if variant['random_exploration']:
from rlkit.exploration_strategies.epsilon_greedy import EpsilonGreedy
expl_policy = PolicyWrappedWithExplorationStrategy(
exploration_strategy=EpsilonGreedy(expl_env.action_space, prob_random_action=1.0),
policy=policy,
)
else:
expl_policy = policy
policy_n.append(policy)
eval_policy_n.append(eval_policy)
expl_policy_n.append(expl_policy)
from rlkit.samplers.data_collector.ma_path_collector import MAMdpPathCollector
eval_path_collector = MAMdpPathCollector(eval_env, eval_policy_n)
expl_path_collector = MAMdpPathCollector(expl_env, expl_policy_n)
from rlkit.data_management.ma_env_replay_buffer import MAEnvReplayBuffer
replay_buffer = MAEnvReplayBuffer(variant['replay_buffer_size'], expl_env, num_agent=num_agent)
from rlkit.torch.masac.masac_gnn import MASACGNNTrainer
trainer = MASACGNNTrainer(
env = expl_env,
qf1=qf1,
target_qf1=target_qf1,
qf2=qf2,
target_qf2=target_qf2,
policy_n=policy_n,
**variant['trainer_kwargs']
)
from rlkit.torch.torch_rl_algorithm import TorchBatchRLAlgorithm
algorithm = TorchBatchRLAlgorithm(
trainer=trainer,
exploration_env=expl_env,
evaluation_env=eval_env,
exploration_data_collector=expl_path_collector,
evaluation_data_collector=eval_path_collector,
replay_buffer=replay_buffer,
log_path_function=get_generic_ma_path_information,
**variant['algorithm_kwargs']
)
algorithm.to(ptu.device)
algorithm.train()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--exp_name', type=str, default='zero_sum')
parser.add_argument('--log_dir', type=str, default='MASACGNNGaussian')
parser.add_argument('--conv', type=str, default='GSage')
parser.add_argument('--hidden', type=int, default=16)
parser.add_argument('--oa', action='store_true', default=False) # online action
parser.add_argument('--snl', action='store_true', default=False) # sum n loss
parser.add_argument('--re', action='store_true', default=False) # random exploration
parser.add_argument('--alpha', type=float, default=None) # init alpha
parser.add_argument('--fa', action='store_true', default=False) # fix alpha
parser.add_argument('--lr', type=float, default=None)
parser.add_argument('--bs', type=int, default=None)
parser.add_argument('--epoch', type=int, default=None)
parser.add_argument('--seed', type=int, default=0)
parser.add_argument('--snapshot_mode', type=str, default="gap_and_last")
parser.add_argument('--snapshot_gap', type=int, default=500)
args = parser.parse_args()
import os.path as osp
pre_dir = './Data/'+args.exp_name
main_dir = args.log_dir\
+args.conv\
+('hidden'+str(args.hidden))\
+('oa' if args.oa else '')\
+('snl' if args.snl else '')\
+('re' if args.re else '')\
+(('alpha'+str(args.alpha)) if args.alpha else '')\
+('fa' if args.fa else '')\
+(('lr'+str(args.lr)) if args.lr else '')\
+(('bs'+str(args.bs)) if args.bs else '')
log_dir = osp.join(pre_dir,main_dir,'seed'+str(args.seed))
# noinspection PyTypeChecker
variant = dict(
num_agent=2,
random_exploration=args.re,
algorithm_kwargs=dict(
num_epochs=(args.epoch if args.epoch else 100),
num_eval_steps_per_epoch=100,
num_trains_per_train_loop=100,
num_expl_steps_per_train_loop=100,
min_num_steps_before_training=100,
max_path_length=100,
batch_size=(args.bs if args.bs else 256),
),
trainer_kwargs=dict(
use_soft_update=True,
tau=1e-2,
discount=0.99,
qf_learning_rate=(args.lr if args.lr else 1e-3),
policy_learning_rate=(args.lr if args.lr else 1e-4),
online_action=args.oa,
sum_n_loss=args.snl,
init_alpha=(args.alpha if args.alpha else 1.),
use_automatic_entropy_tuning=(not args.fa),
),
qf_kwargs=dict(
conv_type=args.conv,
hidden_dim=args.hidden,
),
policy_kwargs=dict(
hidden_dim=args.hidden,
),
replay_buffer_size=int(1E6),
)
import os
if not os.path.isdir(log_dir):
os.makedirs(log_dir)
with open(osp.join(log_dir,'variant.json'),'w') as out_json:
import json
json.dump(variant,out_json,indent=2)
import sys
cmd_input = 'python ' + ' '.join(sys.argv) + '\n'
with open(osp.join(log_dir, 'cmd_input.txt'), 'a') as f:
f.write(cmd_input)
setup_logger(args.exp_name+'/'+main_dir, variant=variant,
snapshot_mode=args.snapshot_mode, snapshot_gap=args.snapshot_gap,
log_dir=log_dir)
import numpy as np
import torch
np.random.seed(args.seed)
torch.manual_seed(args.seed)
# ptu.set_gpu_mode(True) # optionally set the GPU (default=False)
experiment(variant)
| [
[
[
7,
11
],
[
1224,
1228
],
[
1945,
1949
]
],
[
[
19,
33
],
[
1104,
1106
],
[
1149,
1151
],
[
1825,
1827
],
[
1870,
1872
],
[
2128,
2130
],
[
2155,
2157
],
[
2226,
2228
],
[
2249,
2251
],
[
2351,
2353
],
[
2393,
2395
],
[
2487,
2489
]
],
[
[
76,
88
],
[
7849,
7861
]
],
[
[
96,
127
],
[
4561,
4564
]
],
[
[
164,
195
],
[
4467,
4498
]
],
[
[
201,
211
],
[
8200,
8210
]
],
[
[
4635,
4643
],
[
4657,
4665
]
],
[
[
4648,
4654
],
[
4687,
4693
],
[
4755,
4761
],
[
4830,
4836
],
[
4891,
4897
],
[
4949,
4955
],
[
5033,
5039
],
[
5115,
5121
],
[
5204,
5210
],
[
5278,
5284
],
[
5358,
5364
],
[
5416,
5422
],
[
5472,
5478
],
[
5531,
5537
],
[
5586,
5592
],
[
5663,
5669
],
[
5735,
5741
]
],
[
[
5728,
5732
],
[
5805,
5809
],
[
5834,
5838
],
[
5865,
5869
],
[
5907,
5911
],
[
5948,
5952
],
[
5993,
5997
],
[
6038,
6042
],
[
6103,
6107
],
[
6087,
6091
],
[
6150,
6154
],
[
6209,
6213
],
[
6196,
6200
],
[
6268,
6272
],
[
6255,
6259
],
[
6336,
6340
],
[
6449,
6453
],
[
6527,
6531
],
[
6513,
6517
],
[
6796,
6800
],
[
6785,
6789
],
[
6979,
6983
],
[
6968,
6972
],
[
7044,
7048
],
[
7033,
7037
],
[
7090,
7094
],
[
7122,
7126
],
[
7170,
7174
],
[
7156,
7160
],
[
7237,
7241
],
[
7304,
7308
],
[
7338,
7342
],
[
7413,
7417
],
[
7862,
7866
],
[
7937,
7941
],
[
7970,
7974
],
[
8081,
8085
],
[
8114,
8118
],
[
352,
356
],
[
409,
413
]
],
[
[
5766,
5780
],
[
6299,
6302
],
[
7572,
7575
],
[
7771,
7774
]
],
[
[
5785,
5792
],
[
6308,
6315
]
],
[
[
5823,
5831
],
[
6316,
6324
],
[
7880,
7888
]
],
[
[
6289,
6296
],
[
7519,
7526
],
[
7549,
7556
],
[
7581,
7588
],
[
7780,
7787
],
[
8013,
8020
]
],
[
[
6385,
6392
],
[
7661,
7668
],
[
7898,
7905
],
[
8211,
8218
]
],
[
[
7491,
7493
],
[
7505,
7507
],
[
7537,
7539
]
],
[
[
7613,
7621
],
[
7669,
7677
]
],
[
[
7638,
7642
],
[
7651,
7655
]
],
[
[
7699,
7702
],
[
7740,
7743
]
],
[
[
7707,
7716
],
[
7834,
7843
]
],
[
[
7815,
7816
],
[
7826,
7827
]
],
[
[
8033,
8044
],
[
8066,
8068
]
],
[
[
8056,
8061
],
[
8096,
8101
]
]
] |
from django.urls import NoReverseMatch
from django.utils import html
from django.utils.translation import ugettext as _
from couchdbkit import ResourceNotFound
from casexml.apps.case.models import CommCareCaseAction
from corehq.apps.case_search.const import (
CASE_COMPUTED_METADATA,
SPECIAL_CASE_PROPERTIES,
SPECIAL_CASE_PROPERTIES_MAP,
)
from corehq.apps.es.case_search import flatten_result
from corehq.apps.groups.models import Group
from corehq.apps.locations.models import SQLLocation
from corehq.apps.reports.v2.models import BaseDataFormatter
from corehq.apps.reports.v2.utils import report_date_to_json
from corehq.apps.users.models import CouchUser
from corehq.util.quickcache import quickcache
from corehq.util.timezones.utils import parse_date
from corehq.util.view_utils import absolute_reverse
class CaseDataFormatter(BaseDataFormatter):
def __init__(self, request, domain, raw_data):
super(CaseDataFormatter, self).__init__(request, domain, raw_data)
self.raw_data = flatten_result(raw_data)
@property
def owner_id(self):
"""Special Case Property @owner_id"""
if 'owner_id' in self.raw_data:
return self.raw_data.get('owner_id')
elif 'user_id' in self.raw_data:
            return self.raw_data.get('user_id')
else:
return ''
@property
def date_opened(self):
"""Special Case Property date_opened"""
return self._fmt_dateprop('opened_on', False)
@property
def last_modified(self):
"""Special Case Property last_modified"""
return self._fmt_dateprop('modified_on', False)
@property
def closed_by_username(self):
"""Computed metadata"""
return self._get_username(self.closed_by_user_id)
@property
def last_modified_by_user_username(self):
"""Computed metadata"""
return self._get_username(self.raw_data.get('user_id'))
@property
def opened_by_username(self):
"""Computed metadata"""
user = self._creating_user
if user is None:
return _("No Data")
return user['name'] or self._user_not_found_display(user['id'])
@property
def owner_name(self):
"""Computed metadata"""
owner_type, owner = self._owner
if owner_type == 'group':
return '<span class="label label-default">%s</span>' % owner['name']
return owner['name']
@property
def closed_by_user_id(self):
"""Computed metadata"""
return self.raw_data.get('closed_by')
@property
def opened_by_user_id(self):
"""Computed metadata"""
user = self._creating_user
if user is None:
return _("No data")
return user['id']
@property
def server_last_modified_date(self):
"""Computed metadata"""
return self._fmt_dateprop('server_modified_on', False)
def get_context(self):
context = {}
context.update(self.raw_data)
context.update(self._case_info_context)
context['_link'] = self._link
return context
@property
def _link(self):
try:
return absolute_reverse(
'case_data', args=[self.domain, self.raw_data.get('_id')]
)
except NoReverseMatch:
return None
@property
def _case_info_context(self):
context = {}
for prop in SPECIAL_CASE_PROPERTIES + CASE_COMPUTED_METADATA:
context[prop] = self._get_case_info_prop(prop)
return context
def _get_case_info_prop(self, prop):
fmt_prop = prop.replace('@', '')
if hasattr(self, fmt_prop):
return getattr(self, fmt_prop)
elif prop in SPECIAL_CASE_PROPERTIES:
return self._get_special_property(prop)
raise NotImplementedError(
"CaseDataFormatter.{} not found".format(prop))
def _get_special_property(self, prop):
return (SPECIAL_CASE_PROPERTIES_MAP[prop]
.value_getter(self.raw_data))
def _fmt_dateprop(self, prop, iso=True):
val = report_date_to_json(
self.request,
self.domain,
parse_date(self.raw_data[prop])
)
if iso:
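            # "YYYY-MM-DD HH:MM:SS" -> "YYYY-MM-DDTHH:MM:SS" (ISO-8601 style)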
val = 'T'.join(val.split(' ')) if val else None
return val
@property
@quickcache(['self.owner_id'])
def _owning_group(self):
try:
return Group.get(self.owner_id)
except ResourceNotFound:
return None
@property
@quickcache(['self.owner_id'])
def _location(self):
return SQLLocation.objects.get_or_None(location_id=self.owner_id)
@property
@quickcache(['self.owner_id'])
def _owner(self):
if self._owning_group and self._owning_group.name:
return ('group', {'id': self._owning_group._id,
'name': self._owning_group.name})
elif self._location:
return ('location', {'id': self._location.location_id,
'name': self._location.display_name})
return ('user', self._user_meta(self.owner_id))
@property
def _creating_user(self):
try:
creator_id = self.raw_data['opened_by']
except KeyError:
creator_id = None
if 'actions' in self.raw_data:
for action in self.raw_data['actions']:
if action['action_type'] == 'create':
action_doc = CommCareCaseAction.wrap(action)
creator_id = action_doc.get_user_id()
break
if not creator_id:
return None
return self._user_meta(creator_id)
def _user_meta(self, user_id):
return {'id': user_id, 'name': self._get_username(user_id)}
def _user_not_found_display(self, user_id):
return _("Unknown [%s]") % user_id
@quickcache(['user_id'])
def _get_username(self, user_id):
if not user_id:
return None
try:
user = CouchUser.get_by_user_id(user_id)
if user:
return user.username
except CouchUser.AccountTypeError:
return None
| [
[
[
24,
38
],
[
3297,
3311
]
],
[
[
64,
68
]
],
[
[
106,
119
],
[
2095,
2096
],
[
2722,
2723
],
[
5896,
5897
]
],
[
[
144,
160
],
[
4481,
4497
]
],
[
[
199,
217
],
[
5509,
5527
]
],
[
[
267,
289
],
[
3453,
3475
]
],
[
[
295,
318
],
[
3427,
3450
],
[
3742,
3765
]
],
[
[
324,
351
],
[
3973,
4000
]
],
[
[
394,
408
],
[
1020,
1034
]
],
[
[
447,
452
],
[
4441,
4446
]
],
[
[
494,
505
],
[
4613,
4624
]
],
[
[
548,
565
],
[
849,
866
]
],
[
[
607,
626
],
[
4109,
4128
]
],
[
[
664,
673
],
[
6073,
6082
],
[
6180,
6189
]
],
[
[
709,
719
],
[
4350,
4360
],
[
4543,
4553
],
[
4692,
4702
],
[
5930,
5940
]
],
[
[
760,
770
],
[
4193,
4203
]
],
[
[
806,
822
],
[
3176,
3192
]
],
[
[
831,
848
],
[
935,
952
]
]
] |
# LICENSE: Simplified BSD https://github.com/mmp2/megaman/blob/master/LICENSE
from nose import SkipTest
import numpy as np
from numpy.testing import assert_allclose, assert_raises, assert_equal
from scipy.sparse import isspmatrix
from scipy.spatial.distance import cdist, pdist, squareform
from megaman.geometry import (Geometry, compute_adjacency_matrix, Adjacency,
adjacency_methods)
try:
import pyflann as pyf
NO_PYFLANN = False
except ImportError:
NO_PYFLANN = True
def test_adjacency_methods():
assert_equal(set(adjacency_methods()),
{'auto', 'pyflann', 'ball_tree',
'cyflann', 'brute', 'kd_tree'})
def test_adjacency_input_validation():
X = np.random.rand(20, 3)
# need to specify radius or n_neighbors
assert_raises(ValueError, compute_adjacency_matrix, X)
# cannot specify both radius and n_neighbors
assert_raises(ValueError, compute_adjacency_matrix, X,
radius=1, n_neighbors=10)
def test_adjacency():
rng = np.random.RandomState(36)
X = rng.rand(100, 3)
Gtrue = {}
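    # Gtrue caches brute-force reference graphs, keyed by n_neighbors or
    # radius, for the exact methods to be checked against.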
exact_methods = [m for m in Adjacency.methods()
if not m.endswith('flann')]
def check_kneighbors(n_neighbors, method):
if method == 'pyflann' and NO_PYFLANN:
raise SkipTest("pyflann not installed")
G = compute_adjacency_matrix(X, method=method,
n_neighbors=n_neighbors)
assert isspmatrix(G)
assert G.shape == (X.shape[0], X.shape[0])
if method in exact_methods:
assert_allclose(G.toarray(), Gtrue[n_neighbors].toarray())
def check_radius(radius, method):
if method == 'pyflann' and NO_PYFLANN:
raise SkipTest("pyflann not installed")
G = compute_adjacency_matrix(X, method=method,
radius=radius)
assert isspmatrix(G)
assert G.shape == (X.shape[0], X.shape[0])
if method in exact_methods:
assert_allclose(G.toarray(), Gtrue[radius].toarray())
for n_neighbors in [5, 10, 15]:
Gtrue[n_neighbors] = compute_adjacency_matrix(X, method='brute',
n_neighbors=n_neighbors)
for method in Adjacency.methods():
yield check_kneighbors, n_neighbors, method
for radius in [0.1, 0.5, 1.0]:
Gtrue[radius] = compute_adjacency_matrix(X, method='brute',
radius=radius)
for method in Adjacency.methods():
yield check_radius, radius, method
def test_unknown_method():
X = np.arange(20).reshape((10, 2))
assert_raises(ValueError, compute_adjacency_matrix, X, 'foo')
def test_all_methods_close():
rand = np.random.RandomState(36)
X = rand.randn(10, 2)
D_true = squareform(pdist(X))
D_true[D_true > 0.5] = 0
def check_method(method):
kwargs = {}
if method == 'pyflann':
try:
import pyflann as pyf
except ImportError:
raise SkipTest("pyflann not installed.")
flindex = pyf.FLANN()
flindex.build_index(X, algorithm='kmeans',
target_precision=0.9)
kwargs['flann_index'] = flindex
this_D = compute_adjacency_matrix(X, method=method, radius=0.5,
**kwargs)
assert_allclose(this_D.toarray(), D_true, rtol=1E-5)
for method in ['auto', 'cyflann', 'pyflann', 'brute']:
yield check_method, method
def test_custom_adjacency():
class CustomAdjacency(Adjacency):
name = "custom"
def adjacency_graph(self, X):
return squareform(pdist(X))
rand = np.random.RandomState(42)
X = rand.rand(10, 2)
D = compute_adjacency_matrix(X, method='custom', radius=1)
assert_allclose(D, cdist(X, X))
Adjacency._remove_from_registry("custom")
def test_cyflann_index_type():
rand = np.random.RandomState(36)
X = rand.randn(10, 2)
D_true = squareform(pdist(X))
D_true[D_true > 1.5] = 0
def check_index_type(index_type):
method = 'cyflann'
radius = 1.5
cyflann_kwds = {'index_type':index_type}
adjacency_kwds = {'radius':radius, 'cyflann_kwds':cyflann_kwds}
        this_D = compute_adjacency_matrix(X=X, method=method, **adjacency_kwds)
assert_allclose(this_D.toarray(), D_true, rtol=1E-5, atol=1E-5)
for index_type in ['kmeans', 'kdtrees']:
yield check_index_type, index_type | [
[
[
96,
104
],
[
1333,
1341
],
[
1767,
1775
],
[
3094,
3102
]
],
[
[
113,
124
],
[
741,
743
],
[
1052,
1054
],
[
2647,
2649
],
[
2787,
2789
],
[
3779,
3781
],
[
4019,
4021
]
],
[
[
151,
166
],
[
3897,
3912
],
[
1604,
1619
],
[
2028,
2043
],
[
3448,
3463
],
[
4439,
4454
]
],
[
[
168,
181
],
[
811,
824
],
[
919,
932
],
[
2682,
2695
]
],
[
[
183,
195
],
[
553,
565
]
],
[
[
221,
231
],
[
1491,
1501
],
[
1915,
1925
]
],
[
[
267,
272
],
[
3916,
3921
]
],
[
[
274,
279
],
[
2863,
2868
],
[
4095,
4100
],
[
3757,
3762
]
],
[
[
281,
291
],
[
2852,
2862
],
[
4084,
4094
],
[
3746,
3756
]
],
[
[
323,
331
]
],
[
[
333,
357
],
[
837,
861
],
[
945,
969
],
[
2148,
2172
],
[
2421,
2445
],
[
2708,
2732
],
[
3838,
3862
],
[
1380,
1404
],
[
1814,
1838
],
[
3333,
3357
],
[
4363,
4387
]
],
[
[
359,
368
],
[
1151,
1160
],
[
2284,
2293
],
[
2542,
2551
],
[
3653,
3662
],
[
3934,
3943
]
],
[
[
400,
417
],
[
570,
587
]
],
[
[
437,
451
]
],
[
[
456,
466
],
[
1303,
1313
],
[
1737,
1747
]
],
[
[
499,
509
],
[
1303,
1313
],
[
1737,
1747
]
],
[
[
523,
545
]
],
[
[
698,
729
]
],
[
[
1024,
1038
]
],
[
[
2616,
2635
]
],
[
[
2750,
2772
]
],
[
[
3602,
3623
]
],
[
[
3981,
4004
]
]
] |
"""
This compat modules is a wrapper of the core os module that forbids usage of specific operations
(e.g. chown, chmod, getuid) that would be harmful to the Windows file security model of Certbot.
This module is intended to replace standard os module throughout certbot projects (except acme).
"""
# pylint: disable=function-redefined
from __future__ import absolute_import
# First round of wrapping: we import statically all public attributes exposed by the os module
# This allows in particular to have pylint, mypy, IDEs be aware that most of os members are
# available in certbot.compat.os.
from os import * # type: ignore # pylint: disable=wildcard-import,unused-wildcard-import,redefined-builtin,os-module-forbidden
# Second round of wrapping: we import dynamically all attributes from the os module that have not
# yet been imported by the first round (static import). This covers in particular the case of
# specific python 3.x versions where not all public attributes are in the special __all__ of os,
# and so not in `from os import *`.
import os as std_os # pylint: disable=os-module-forbidden
import sys as std_sys
ourselves = std_sys.modules[__name__]
for attribute in dir(std_os):
# Check if the attribute does not already exist in our module. It could be internal attributes
# of the module (__name__, __doc__), or attributes from standard os already imported with
# `from os import *`.
if not hasattr(ourselves, attribute):
setattr(ourselves, attribute, getattr(std_os, attribute))
# Similar to os.path, allow certbot.compat.os.path to behave as a module
std_sys.modules[__name__ + '.path'] = path
# Clean all remaining importables that are not from the core os module.
del ourselves, std_os, std_sys
# Chmod is the root of all evil for our security model on Windows. With the default implementation
# of os.chmod on Windows, almost all bits on mode will be ignored, and only a general RO or RW will
# be applied. The DACL, the inner mechanism to control file access on Windows, will stay on its
# default definition, effectively giving at least read permissions to anyone, as the default
# permissions on root path will be inherited by the file (as NTFS states), and root path can be read
# by anyone. So the given mode needs to be translated into a secured and not inherited DACL that
# will be applied to this file using filesystem.chmod, calling internally the win32security
# module to construct and apply the DACL. Complete security model to translate a POSIX mode into
# a suitable DACL on Windows for Certbot can be found here:
# https://github.com/certbot/certbot/issues/6356
# Basically, it states that appropriate permissions will be set for the owner, nothing for the
# group, appropriate permissions for the "Everyone" group, and all permissions to the
# "Administrators" group + "System" user, as they can do everything anyway.
def chmod(*unused_args, **unused_kwargs): # pylint: disable=function-redefined
"""Method os.chmod() is forbidden"""
raise RuntimeError('Usage of os.chmod() is forbidden. '
'Use certbot.compat.filesystem.chmod() instead.')
# Because of the blocking strategy on file handlers on Windows, rename does not behave as expected
# with POSIX systems: an exception will be raised if dst already exists.
def rename(*unused_args, **unused_kwargs):
"""Method os.rename() is forbidden"""
raise RuntimeError('Usage of os.rename() is forbidden. '
'Use certbot.compat.filesystem.replace() instead.')
# Behavior of os.replace is consistent between Windows and Linux. However, it is not supported on
# Python 2.x. So, as for os.rename, we forbid it in favor of filesystem.replace.
def replace(*unused_args, **unused_kwargs):
"""Method os.replace() is forbidden"""
raise RuntimeError('Usage of os.replace() is forbidden. '
'Use certbot.compat.filesystem.replace() instead.')
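# A minimal usage sketch (hypothetical caller code, not part of this module):
# callers import certbot.compat.os in place of the standard os module; most
# attributes are delegated to the core os, while the forbidden operations raise.
#
#     from certbot.compat import os
#
#     os.makedirs("demo-dir", exist_ok=True)   # delegated to the core os module
#     try:
#         os.chmod("demo-dir", 0o700)          # forbidden by the wrapper
#     except RuntimeError:
#         pass  # use certbot.compat.filesystem.chmod() instead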
| [
[
[
359,
374
]
],
[
[
612,
613
],
[
1640,
1644
]
],
[
[
1059,
1071
],
[
1192,
1198
],
[
1508,
1514
],
[
1733,
1739
]
],
[
[
1118,
1132
],
[
1145,
1152
],
[
1602,
1609
],
[
1741,
1748
]
],
[
[
1133,
1142
],
[
1439,
1448
],
[
1478,
1487
],
[
1722,
1731
]
],
[
[
1175,
1184
],
[
1450,
1459
],
[
1489,
1498
],
[
1516,
1525
]
],
[
[
2894,
2899
]
],
[
[
3322,
3328
]
],
[
[
3724,
3731
]
]
] |
from commons import *
import os
def pgp_check():
init_directory('./temp')
# gpg must exist on your system
status = os.system('gpg --version')
if status==0:
print_up('gpg is found')
else:
print_err('can\'t find gpg')
def verify_publickey_message(pk, msg):
# obtain a temp filename
fn = get_random_hex_string(10)
# save the public key file and the message file
pkfn = f'./temp/{fn}.pk'
pkbinfn = pkfn+'.gpg'
msgfn = f'./temp/{fn}.msg'
writefile(pkfn, pk, mode='w', encoding='utf-8')
writefile(msgfn, msg, mode='w', encoding='utf-8')
def cleanup():
removefile(pkfn)
removefile(msgfn)
removefile(pkbinfn)
# remove armor
status = os.system(f'gpg --dearmor {pkfn}')
if status != 0:
qprint('status:', status)
cleanup()
raise Exception('failed to dearmor the public key (there might be something wrong with your public key)')
# verify
status = os.system(f'gpg --no-default-keyring --keyring {pkbinfn} --verify {msgfn}')
if status != 0:
qprint('status:', status)
cleanup()
raise Exception('failed to verify the message (your public key is okay but the signature you supplied does not match the public key, or is of a wrong format)')
cleanup()
return True
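# A minimal usage sketch (the file names below are placeholders):
#
#     pgp_check()  # make sure gpg is installed and ./temp exists
#     pk = open('alice.pub.asc', encoding='utf-8').read()       # armored public key
#     msg = open('statement.txt.asc', encoding='utf-8').read()  # clearsigned message
#     verify_publickey_message(pk, msg)  # returns True, or raises on failure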
| [
[
[
20,
21
],
[
54,
68
],
[
182,
190
],
[
225,
234
],
[
332,
353
],
[
502,
511
],
[
554,
563
],
[
799,
805
],
[
1088,
1094
],
[
632,
642
],
[
657,
667
],
[
683,
693
]
],
[
[
29,
31
],
[
129,
131
],
[
736,
738
],
[
984,
986
]
],
[
[
37,
46
]
],
[
[
259,
283
]
]
] |
import unittest
import math
from Include.Tuple import *
#
# Tuple Unit tests
#
class TestTuplePointVector(unittest.TestCase):
def test_Tuple_ifWArgumentIsOneTupleIsPoint(self):
self.a = Tuple(4.3, -4.2, 3.1, 1.0)
self.assertEqual(self.a.x, 4.3)
self.assertEqual(self.a.y, -4.2)
self.assertEqual(self.a.z, 3.1)
self.assertEqual(self.a.w, 1.0)
self.assertEqual(self.a.get_type(), TupleTypes.POINT)
self.assertNotEqual(self.a.get_type(), TupleTypes.VECTOR)
def test_Tuple_ifWArgumentIsZeroTupleIsVector(self):
self.a = Tuple(4.3, -4.2, 3.1, 0.0)
self.assertEqual(self.a.x, 4.3)
self.assertEqual(self.a.y, -4.2)
self.assertEqual(self.a.z, 3.1)
self.assertEqual(self.a.w, 0.0)
self.assertEqual(self.a.get_type(), TupleTypes.VECTOR)
self.assertNotEqual(self.a.get_type(), TupleTypes.POINT)
class TestTupleArithmetic(unittest.TestCase):
def test_Tuple_addTwoTuples(self):
self.a1 = Tuple(3, -2, 5, 1)
self.a2 = Tuple(-2, 3, 1, 0)
self.result = self.a1 + self.a2
self.assertEqual(self.result, Tuple(1, 1, 6, 1))
def test_Tuple_subtractTwoPoints(self):
self.p1 = point(3, 2, 1)
self.p2 = point(5, 6, 7)
self.result = self.p1 - self.p2
self.assertEqual(self.result, vector(-2, -4, -6))
def test_Tuple_subtractAVectorFromAPoint(self):
self.p = point(3, 2, 1)
self.v = vector(5, 6, 7)
self.result = self.p - self.v
self.assertEqual(self.result, point(-2, -4, -6))
def test_Tuple_subtractTwoVectors(self):
self.v1 = vector(3, 2, 1)
self.v2 = vector(5, 6, 7)
self.result = self.v1 - self.v2
self.assertEqual(self.result, vector(-2, -4, -6))
def test_Tuple_subtractVectorFromZeroVector(self):
self.zero = vector(0, 0, 0)
self.v = vector(1, -2, 3)
self.result = self.zero - self.v
self.assertEqual(self.result, vector(-1, 2, -3))
def test_Tuple_negateATuple(self):
self.a = Tuple(1, -2, 3, -4)
self.result = -self.a
self.assertEqual(self.result, Tuple(-1, 2, -3, 4))
def test_Tuple_multiplyATupleByAScalar(self):
self.a = Tuple(1, -2, 3, -4)
self.result = self.a * 3.5
self.assertEqual(self.result, Tuple(3.5, -7, 10.5, -14))
def test_Tuple_multiplyATupleByAFraction(self):
self.a = Tuple(1, -2, 3, -4)
self.result = self.a * 0.5
self.assertEqual(self.result, Tuple(0.5, -1, 1.5, -2))
def test_Tuple_divideATupleByAScalar(self):
self.a = Tuple(1, -2, 3, -4)
self.result = self.a / 2
self.assertEqual(self.result, Tuple(0.5, -1, 1.5, -2))
class TestTupleMagnitude(unittest.TestCase):
def test_Tuple_computeTheMagnitudeWithVectorXComponentOne(self):
self.v = vector(1, 0, 0)
self.result = self.v.magnitude()
self.assertEqual(self.result, 1)
def test_Tuple_computeTheMagnitudeWithVectorYComponentOne(self):
self.v = vector(0, 1, 0)
self.result = self.v.magnitude()
self.assertEqual(self.result, 1)
def test_Tuple_computeTheMagnitudeWithVectorZComponentOne(self):
self.v = vector(0, 0, 1)
self.result = self.v.magnitude()
self.assertEqual(self.result, 1)
def test_Tuple_computeTheMagnitudeWithVectorOneTwoThree(self):
self.v = vector(1, 2, 3)
self.result = self.v.magnitude()
self.assertEqual(self.result, math.sqrt(14))
def test_Tuple_computeTheMagnitudeWithVectorMinusOneTwoThree(self):
self.v = vector(-1, -2, -3)
self.result = self.v.magnitude()
self.assertEqual(self.result, math.sqrt(14))
class TestTupleNormalize(unittest.TestCase):
def test_Tuple_normalizeVectorWithXAsFour(self):
self.v = vector(4, 0, 0)
self.result = self.v.normalize()
self.assertEqual(self.result, vector(1, 0, 0))
def test_Tuple_normalizeVectorMinusOneTwoThree(self):
self.v = vector(1, 2, 3)
self.magnitude = math.sqrt(14)
self.result = self.v.normalize()
self.assertEqual(self.result, vector(1/self.magnitude, 2/self.magnitude, 3/self.magnitude))
def test_Tuple_computeMagnitudeOfNormalizedVector(self):
self.v = vector(1, 2, 3)
self.norm = self.v.normalize()
self.result = self.norm.magnitude()
self.assertEqual(self.result, 1)
class TestTupleDotProduct(unittest.TestCase):
def test_Tuple_theDotProductOfTwoTuples(self):
self.a = vector(1, 2, 3)
self.b = vector(2, 3, 4)
self.result = self.a.dot(self.b)
self.assertEqual(self.result, 20)
class TestTupleCrossProduct(unittest.TestCase):
def test_Tuple_theCrossProductOfTwoVectors(self):
self.a = vector(1, 2, 3)
self.b = vector(2, 3, 4)
self.result1 = self.a.cross(self.b)
self.result2 = self.b.cross(self.a)
self.assertEqual(self.result1, vector(-1, 2, -1))
self.assertEqual(self.result2, vector(1, -2, 1))
#
# Color Struct Unit test
#
class TestTupleColor(unittest.TestCase):
def test_Color_createsAColor(self):
self.c = Color(-0.5, 0.4, 1.7)
self.assertEqual(self.c.red, -0.5)
self.assertEqual(self.c.green, 0.4)
self.assertEqual(self.c.blue, 1.7)
def test_Color_AddColors(self):
self.c1 = Color(0.9, 0.6, 0.75)
self.c2 = Color(0.7, 0.1, 0.25)
self.result = self.c1 + self.c2
self.assertEqual(self.result, Color(1.6, 0.7, 1.0))
def test_Color_SubtractColors(self):
self.c1 = Color(0.9, 0.6, 0.75)
self.c2 = Color(0.6, 0.1, 0.25)
self.result = self.c1 - self.c2
self.assertEqual(self.result, Color(0.3, 0.5, 0.5))
def test_Color_MultiplyColorWithScalar(self):
self.c = Color(0.2, 0.3, 0.4)
self.result = self.c * 2
self.assertEqual(self.result, Color(0.4, 0.6, 0.8))
def test_Color_MultiplyingColors(self):
self.c1 = Color(1, 0.2, 0.4)
self.c2 = Color(0.9, 1, 0.1)
self.result = self.c1 * self.c2
self.assertEqual(self.result, Color(0.9, 0.2, 0.04))
def test_Tuple_reflectingAVectorApproachingAt45Degrees(self):
self.v = vector(1, -1, 0)
self.n = vector(0, 1, 0)
self.r = self.v.reflect(self.n)
self.assertEqual(self.r, vector(1, 1, 0))
def test_Tuple_reflectingAVectorOffASlantedSurface(self):
self.v = vector(0, -1, 0)
self.n = vector(math.sqrt(2)/2, math.sqrt(2)/2, 0)
self.r = self.v.reflect(self.n)
self.assertEqual(self.r, vector(1, 0, 0))
#
# Point function Unit tests
#
class TestTuplePoint(unittest.TestCase):
def test_point_functionCreatesATupleAsAPoint(self):
self.point = point(4, -4, 3)
self.assertEqual(self.point, Tuple(4, -4, 3, 1))
#
# Vector function Unit tests
#
class TestTupleVector(unittest.TestCase):
def test_vector_functionCreatesATupleAsAVector(self):
self.vector = vector(4, -4, 3)
self.assertEqual(self.vector, Tuple(4, -4, 3, 0))
if __name__ == '__main__':
unittest.main()
| [
[
[
7,
15
],
[
108,
116
],
[
934,
942
],
[
2772,
2780
],
[
3770,
3778
],
[
4490,
4498
],
[
4739,
4747
],
[
5133,
5141
],
[
6731,
6739
],
[
6957,
6965
],
[
7164,
7172
]
],
[
[
23,
27
],
[
3526,
3530
],
[
3729,
3733
],
[
4089,
4093
],
[
6549,
6553
],
[
6565,
6569
]
],
[
[
55,
56
],
[
200,
205
],
[
432,
442
],
[
497,
507
],
[
591,
596
],
[
823,
833
],
[
889,
899
],
[
1011,
1016
],
[
1048,
1053
],
[
1145,
1150
],
[
1227,
1232
],
[
1260,
1265
],
[
1353,
1359
],
[
1443,
1448
],
[
1475,
1481
],
[
1567,
1572
],
[
1650,
1656
],
[
1684,
1690
],
[
1778,
1784
],
[
1874,
1880
],
[
1907,
1913
],
[
2003,
2009
],
[
2079,
2084
],
[
2167,
2172
],
[
2256,
2261
],
[
2349,
2354
],
[
2446,
2451
],
[
2539,
2544
],
[
2630,
2635
],
[
2721,
2726
],
[
2878,
2884
],
[
3063,
3069
],
[
3248,
3254
],
[
3431,
3437
],
[
3631,
3637
],
[
3860,
3866
],
[
3955,
3961
],
[
4048,
4054
],
[
4182,
4188
],
[
4323,
4329
],
[
4578,
4584
],
[
4611,
4617
],
[
4830,
4836
],
[
4863,
4869
],
[
5006,
5012
],
[
5064,
5070
],
[
5210,
5215
],
[
5417,
5422
],
[
5457,
5462
],
[
5557,
5562
],
[
5639,
5644
],
[
5679,
5684
],
[
5779,
5784
],
[
5869,
5874
],
[
5961,
5966
],
[
6046,
6051
],
[
6083,
6088
],
[
6180,
6185
],
[
6287,
6293
],
[
6321,
6327
],
[
6411,
6417
],
[
6508,
6514
],
[
6542,
6548
],
[
6658,
6664
],
[
6828,
6833
],
[
6881,
6886
],
[
7057,
7063
],
[
7112,
7117
]
],
[
[
87,
107
]
],
[
[
914,
933
]
],
[
[
2753,
2771
]
],
[
[
3751,
3769
]
],
[
[
4470,
4489
]
],
[
[
4717,
4738
]
],
[
[
5118,
5132
]
],
[
[
6716,
6730
]
],
[
[
6941,
6956
]
]
] |
from collections.abc import Mapping
import numpy as np
from pickydict import PickyDict
from .utils import load_known_key_conversions
_key_regex_replacements = {r"\s": "_",
r"[!?.,;:]": ""}
_key_replacements = load_known_key_conversions()
class Metadata:
"""Class to handle spectrum metadata in matchms.
Metadata entries will be stored as PickyDict dictionary in `metadata.data`.
Unlike normal Python dictionaries, not all key names will be accepted.
Key names will be forced to be lower-case to avoid confusions between key such
as "Precursor_MZ" and "precursor_mz".
To avoid the default harmonization of the metadata dictionary use the option
`matchms_key_style=False`.
Code example:
.. code-block:: python
metadata = Metadata({"Precursor_MZ": 201.5, "Compound Name": "SuperStuff"})
print(metadata["precursor_mz"]) # => 201.5
print(metadata["compound_name"]) # => SuperStuff
Or if the matchms default metadata harmonization should not take place:
.. code-block:: python
metadata = Metadata({"Precursor_MZ": 201.5, "Compound Name": "SuperStuff"},
matchms_key_style=False)
print(metadata["precursor_mz"]) # => 201.5
print(metadata["compound_name"]) # => None (now you need to use "compound name")
"""
def __init__(self, metadata: dict = None,
matchms_key_style: bool = True):
"""
Parameters
----------
metadata:
Spectrum metadata as a dictionary.
matchms_key_style:
Set to False if metadata harmonization to default keys is not desired.
The default is True.
"""
if metadata is None:
self._data = PickyDict({})
elif isinstance(metadata, Mapping):
self._data = PickyDict(metadata)
else:
raise ValueError("Unexpected data type for metadata (should be dictionary, or None).")
self.matchms_key_style = matchms_key_style
if self.matchms_key_style is True:
self.harmonize_metadata()
def __eq__(self, other_metadata):
if self.keys() != other_metadata.keys():
return False
for key, value in self.items():
if isinstance(value, np.ndarray):
if not np.all(value == other_metadata.get(key)):
return False
elif value != other_metadata.get(key):
return False
return True
def harmonize_metadata(self):
"""Runs default harmonization of metadata.
        Method harmonizes metadata field names, which includes setting them to lower-case
        and running a series of regex replacements followed by default field name
replacements (such as precursor_mass --> precursor_mz).
"""
self._data.key_regex_replacements = _key_regex_replacements
self._data.key_replacements = _key_replacements
# ------------------------------
# Getters and Setters
# ------------------------------
def get(self, key: str, default=None):
"""Retrieve value from :attr:`metadata` dict.
"""
return self._data.copy().get(key, default)
def set(self, key: str, value):
"""Set value in :attr:`metadata` dict.
"""
self._data[key] = value
if self.matchms_key_style is True:
self.harmonize_metadata()
return self
def keys(self):
"""Retrieve all keys of :attr:`.metadata` dict.
"""
return self._data.keys()
def values(self):
"""Retrieve all values of :attr:`.metadata` dict.
"""
return self._data.values()
def items(self):
"""Retrieve all items (key, value pairs) of :attr:`.metadata` dict.
"""
return self._data.items()
def __getitem__(self, key=None):
return self.get(key)
def __setitem__(self, key, newvalue):
self.set(key, newvalue)
@property
def data(self):
return self._data.copy()
@data.setter
def data(self, new_dict):
if isinstance(new_dict, PickyDict):
self._data = new_dict
elif isinstance(new_dict, Mapping):
self._data = PickyDict(new_dict)
if self.matchms_key_style is True:
self.harmonize_metadata()
else:
raise TypeError("Expected input of type dict or PickyDict.")
| [
[
[
28,
35
],
[
1839,
1846
],
[
4250,
4257
]
],
[
[
43,
54
],
[
2327,
2329
],
[
2363,
2365
]
],
[
[
77,
86
],
[
1791,
1800
],
[
1874,
1883
],
[
4170,
4179
],
[
4285,
4294
]
],
[
[
106,
132
],
[
238,
264
]
],
[
[
135,
158
],
[
2916,
2939
]
],
[
[
218,
235
],
[
2978,
2995
]
],
[
[
275,
283
]
]
] |
"""
Base settings to build other settings files upon.
"""
import environ
ROOT_DIR = (
environ.Path(__file__) - 3
) # (webscrape/config/settings/base.py - 3 = webscrape/)
APPS_DIR = ROOT_DIR.path("webscrape")
env = environ.Env()
READ_DOT_ENV_FILE = env.bool("DJANGO_READ_DOT_ENV_FILE", default=False)
if READ_DOT_ENV_FILE:
# OS environment variables take precedence over variables from .env
env.read_env(str(ROOT_DIR.path(".env")))
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool("DJANGO_DEBUG", False)
# Local time zone. Choices are
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# though not all of them may be available with every OS.
# In Windows, this must be set to your system time zone.
TIME_ZONE = "UTC"
# https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = "en-us"
# https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# https://docs.djangoproject.com/en/dev/ref/settings/#locale-paths
LOCALE_PATHS = [ROOT_DIR.path("locale")]
# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
# DATABASES = {
# "default": env.db("DATABASE_URL", default="postgres:///webscrape")
# }
# DATABASES["default"]["ATOMIC_REQUESTS"] = True
DATABASES = {
'default': {
'NAME': 'messaging',
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'USER': 'messaging',
'PASSWORD': 'messaging',
'HOST': 'localhost',
'PORT': 5432,
'ATOMIC_REQUESTS': True
}
}
# URLS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = "config.urls"
# https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = "config.wsgi.application"
# APPS
# ------------------------------------------------------------------------------
DJANGO_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.messages",
"django.contrib.staticfiles",
# "django.contrib.humanize", # Handy template tags
"django.contrib.admin",
]
THIRD_PARTY_APPS = [
"rest_framework",
]
LOCAL_APPS = [
"webscrape.application.apps.ApplicationConfig",
# Your stuff: custom apps go here
]
# https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIGRATIONS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules
MIGRATION_MODULES = {"sites": "webscrape.contrib.sites.migrations"}
# AUTHENTICATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model
# AUTH_USER_MODEL = "users.User"
# https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url
# LOGIN_REDIRECT_URL = "users:redirect"
# https://docs.djangoproject.com/en/dev/ref/settings/#login-url
# LOGIN_URL = "account_login"
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = [
# https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django
"django.contrib.auth.hashers.Argon2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# MIDDLEWARE
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
# STATIC
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR("staticfiles"))
# https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = "/static/"
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [str(APPS_DIR.path("static"))]
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = [
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
]
# MEDIA
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR("media"))
# https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = "/media/"
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
"BACKEND": "django.template.backends.django.DjangoTemplates",
# https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
"DIRS": [str(APPS_DIR.path("templates"))],
"OPTIONS": {
# https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
"loaders": [
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
# https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
"webscrape.utils.context_processors.settings_context",
],
},
}
]
# http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = "bootstrap4"
# FIXTURES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs
FIXTURE_DIRS = (str(APPS_DIR.path("fixtures")),)
# SECURITY
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-httponly
SESSION_COOKIE_HTTPONLY = True
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-httponly
CSRF_COOKIE_HTTPONLY = True
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-browser-xss-filter
SECURE_BROWSER_XSS_FILTER = True
# https://docs.djangoproject.com/en/dev/ref/settings/#x-frame-options
X_FRAME_OPTIONS = "DENY"
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env(
"DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.smtp.EmailBackend"
)
# https://docs.djangoproject.com/en/2.2/ref/settings/#email-timeout
EMAIL_TIMEOUT = 5
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL.
ADMIN_URL = "admin/"
# https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = [("""Sukant Priyadarshi""", "sukant1994@gmail.com")]
# https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# LOGGING
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#logging
# See https://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {
"format": "%(levelname)s %(asctime)s %(module)s "
"%(process)d %(thread)d %(message)s"
}
},
"handlers": {
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "verbose",
}
},
"root": {"level": "INFO", "handlers": ["console"]},
}
# django-allauth
# ------------------------------------------------------------------------------
ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", True)
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_AUTHENTICATION_METHOD = "username"
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_EMAIL_REQUIRED = True
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
# https://django-allauth.readthedocs.io/en/latest/configuration.html
# ACCOUNT_ADAPTER = "webscrape.users.adapters.AccountAdapter"
# https://django-allauth.readthedocs.io/en/latest/configuration.html
# SOCIALACCOUNT_ADAPTER = "webscrape.users.adapters.SocialAccountAdapter"
# Your stuff...
# ------------------------------------------------------------------------------
| [
[
[
66,
73
],
[
92,
99
],
[
222,
229
]
],
[
[
75,
83
],
[
188,
196
],
[
424,
432
],
[
1341,
1349
],
[
5529,
5537
]
],
[
[
177,
185
],
[
5761,
5769
],
[
6192,
6200
],
[
6734,
6742
],
[
8145,
8153
]
],
[
[
216,
219
],
[
257,
260
],
[
407,
410
],
[
608,
611
],
[
8861,
8864
],
[
10244,
10247
]
],
[
[
237,
254
],
[
312,
329
]
],
[
[
600,
605
]
],
[
[
841,
850
]
],
[
[
927,
940
]
],
[
[
1013,
1020
]
],
[
[
1088,
1096
]
],
[
[
1167,
1175
]
],
[
[
1244,
1250
]
],
[
[
1325,
1337
]
],
[
[
1668,
1677
]
],
[
[
2097,
2109
]
],
[
[
2197,
2213
]
],
[
[
2331,
2342
],
[
2857,
2868
]
],
[
[
2618,
2634
],
[
2871,
2887
]
],
[
[
2664,
2674
],
[
2890,
2900
]
],
[
[
2840,
2854
]
],
[
[
3068,
3085
]
],
[
[
3313,
3336
]
],
[
[
3926,
3942
]
],
[
[
4354,
4378
]
],
[
[
4892,
4902
]
],
[
[
5511,
5522
]
],
[
[
5619,
5629
]
],
[
[
5737,
5753
]
],
[
[
5872,
5891
]
],
[
[
6175,
6185
]
],
[
[
6275,
6284
]
],
[
[
6455,
6464
]
],
[
[
7929,
7949
]
],
[
[
8125,
8137
]
],
[
[
8345,
8368
]
],
[
[
8451,
8471
]
],
[
[
8559,
8584
]
],
[
[
8662,
8677
]
],
[
[
8845,
8858
]
],
[
[
9018,
9031
]
],
[
[
9146,
9155
]
],
[
[
9228,
9234
],
[
9364,
9370
]
],
[
[
9353,
9361
]
],
[
[
9651,
9658
]
],
[
[
10215,
10241
]
],
[
[
10365,
10394
]
],
[
[
10477,
10499
]
],
[
[
10576,
10602
]
]
] |
"This creates an HDF5 file with a potentially large number of objects"
import sys
import numpy
import tables
filename = sys.argv[1]
# Open a new empty HDF5 file
fileh = tables.open_file(filename, mode="w")
# nlevels -- Number of levels in hierarchy
# ngroups -- Number of groups on each level
# ndatasets -- Number of arrays on each group
# LR: Low ratio groups/datasets
#nlevels, ngroups, ndatasets = (3, 1, 1000)
# MR: Medium ratio groups/datasets
nlevels, ngroups, ndatasets = (3, 10, 100)
#nlevels, ngroups, ndatasets = (3, 5, 10)
# HR: High ratio groups/datasets
#nlevels, ngroups, ndatasets = (30, 10, 10)
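# With the MR settings above, the loops below create 3 * 10 = 30 'groupN'
# groups (plus 3 'ngroupN' level groups) holding 3 * 10 * 100 = 3000 arrays.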
# Create an Array to save on disk
a = numpy.array([-1, 2, 4], numpy.int16)
group = fileh.root
group2 = fileh.root
for k in range(nlevels):
for j in range(ngroups):
for i in range(ndatasets):
# Save the array on the HDF5 file
fileh.create_array(group2, 'array' + str(i),
a, "Signed short array")
# Create a new group
group2 = fileh.create_group(group, 'group' + str(j))
# Create a new group
group3 = fileh.create_group(group, 'ngroup' + str(k))
# Iterate over this new group (group3)
group = group3
group2 = group3
fileh.close()
| [
[
[
79,
82
],
[
122,
125
]
],
[
[
90,
95
],
[
655,
660
],
[
679,
684
]
],
[
[
103,
109
],
[
172,
178
]
],
[
[
111,
119
],
[
189,
197
]
],
[
[
164,
169
],
[
701,
706
],
[
721,
726
],
[
879,
884
],
[
1026,
1031
],
[
1108,
1113
],
[
1236,
1241
]
],
[
[
454,
461
],
[
747,
754
]
],
[
[
463,
470
],
[
776,
783
]
],
[
[
472,
481
],
[
809,
818
]
],
[
[
651,
652
],
[
955,
956
]
],
[
[
693,
698
],
[
1045,
1050
],
[
1127,
1132
]
],
[
[
712,
718
],
[
898,
904
]
],
[
[
736,
737
],
[
1149,
1150
]
],
[
[
765,
766
],
[
1066,
1067
]
],
[
[
798,
799
],
[
920,
921
]
],
[
[
1017,
1023
],
[
898,
904
]
],
[
[
1099,
1105
],
[
1208,
1214
],
[
1228,
1234
]
],
[
[
1200,
1205
],
[
1045,
1050
],
[
1127,
1132
]
],
[
[
1219,
1225
],
[
898,
904
]
]
] |
import logging
import os
import unittest
import pypesto
import pypesto.logging
class LoggingTest(unittest.TestCase):
def test_optimize(self):
# logging
pypesto.logging.log_to_console(logging.WARN)
filename = ".test_logging.tmp"
pypesto.logging.log_to_file(logging.DEBUG, filename)
logger = logging.getLogger('pypesto')
if os.path.exists(filename):
os.remove(filename)
fh = logging.FileHandler(filename)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
logger.info("start test")
# problem definition
def fun(_):
raise Exception("This function cannot be called.")
objective = pypesto.Objective(fun=fun)
problem = pypesto.Problem(objective, -1, 1)
optimizer = pypesto.ScipyOptimizer()
options = {'allow_failed_starts': True}
# optimization
pypesto.minimize(problem, optimizer, 5, options=options)
# assert logging worked
self.assertTrue(os.path.exists(filename))
f = open(filename, 'rb')
content = str(f.read())
f.close()
# tidy up
os.remove(filename)
# check if error message got inserted
self.assertTrue("fail" in content)
if __name__ == '__main__':
    suite = unittest.TestSuite()
    suite.addTest(LoggingTest('test_optimize'))
    unittest.TextTestRunner().run(suite)
| [[[7, 14], [206, 213], [295, 302], [337, 344], [448, 455], [498, 505]], [[22, 24], [377, 379], [415, 417], [1030, 1032], [1166, 1168]], [[32, 40], [99, 107], [1317, 1325], [1375, 1383]], [[48, 55]], [[63, 78], [175, 182], [267, 274], [711, 718], [756, 763], [811, 818], [916, 923]], [[87, 98], [1356, 1367]], [[1309, 1314], [1342, 1347]]] |
from django.urls import path
from .views import audit_view
urlpatterns = [
path('', audit_view, name="audit")
] | [[[24, 28], [80, 84]], [[48, 58], [89, 99]], [[60, 71]]] |
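Editor's sketch (not a dataset row): the def_use_chains above are consistent with each inner [start, end] pair being a 0-based, end-exclusive character span into the row's code string, and each chain grouping one name's definition with its uses; assuming that layout, the row can be decoded like this:
code = ('from django.urls import path\n'
        'from .views import audit_view\n'
        'urlpatterns = [\n'
        '    path(\'\', audit_view, name="audit")\n'
        ']')
chains = [[[24, 28], [80, 84]], [[48, 58], [89, 99]], [[60, 71]]]
for chain in chains:
    # each chain slices out the same identifier: path, audit_view, urlpatterns
    print([code[s:e] for s, e in chain])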
from cashbook.models import CashBookTransaction
from controls.models import ModuleSettings, Period
from django.contrib.auth.mixins import LoginRequiredMixin
from django.db.models import F, OuterRef, Subquery, Sum
from django.db.models.functions import Coalesce
from django.views.generic import TemplateView
from purchases.models import PurchaseHeader, PurchaseMatching
from sales.models import SaleHeader, SaleMatching
class TotalOwedReport:
def __init__(self, header_model, match_model):
self.header_model = header_model
self.match_model = match_model
def _report(self, matched_by, matched_to, types, period_subquery):
return (
self.header_model
.objects
.filter(type__in=types)
.filter(period__fy_and_period__in=Subquery(period_subquery))
.annotate(
mbt=Coalesce(
Subquery(
matched_by.values('matched_by_total')
),
0
)
)
.annotate(
mtt=Coalesce(
Subquery(
matched_to.values('matched_to_total')
),
0
)
)
.annotate(
actual_due=F('due') + F('mbt') + F('mtt')
)
)
def _report_per_period_for_last_5_periods(self, matched_by, matched_to, types, period):
period_subquery = (
Period
.objects
.filter(fy_and_period__lte=period.fy_and_period)
.values('fy_and_period')
.order_by("-fy_and_period")
[:5]
)
q = (
self
._report(matched_by, matched_to, types, period_subquery)
.values('period__fy_and_period')
.annotate(
total_due=Coalesce(Sum('actual_due'), 0)
)
)
report = {}
for period in period_subquery:
report[period["fy_and_period"]] = 0
for period in q:
report[period["period__fy_and_period"]] = period["total_due"]
return report
def _report_for_all_periods_prior(self, matched_by, matched_to, types, period):
"""
Get the total owed for all periods prior to @period i.e. the total for 'Older'
"""
period_subquery = (
Period
.objects
.filter(fy_and_period__lte=period.fy_and_period)
.values('fy_and_period')
.order_by("-fy_and_period")
[5:]
)
return (
self
._report(matched_by, matched_to, types, period_subquery)
.aggregate(
total_due=Coalesce(Sum('actual_due'), 0)
)
)
def report(self, current_period):
"""
This is used by the dashboard and not the aged creditors report
"""
matched_by = (
self.match_model
.objects
.filter(period__fy_and_period__gt=current_period.fy_and_period)
.filter(matched_by=OuterRef('pk'))
.values('matched_by')
.annotate(matched_by_total=Sum('value') * -1)
)
matched_to = (
self.match_model
.objects
.filter(period__fy_and_period__gt=current_period.fy_and_period)
.filter(matched_to=OuterRef('pk'))
.values('matched_to')
.annotate(matched_to_total=Sum('value'))
)
non_payment_types = [
t[0]
for t in self.header_model.types
if t[0] not in self.header_model.payment_types
]
report_from_current_to_4_periods_ago = self._report_per_period_for_last_5_periods(
matched_by, matched_to, non_payment_types, current_period)
older = self._report_for_all_periods_prior(
matched_by, matched_to, non_payment_types, current_period)
report = []
labels = ["Current", "1 period ago", "2 periods ago", "3 periods ago", "4 periods ago"]
for i, (period, value) in enumerate(report_from_current_to_4_periods_ago.items()):
r = {
"period": labels[i],
"value": value
}
report.append(r)
report.append({
"period": "Older",
"value": older["total_due"]
})
        report.reverse()  # in the UI we want 'Older' to show first (left to right), i.e. the opposite of the list order
return report
class DashBoard(LoginRequiredMixin, TemplateView):
template_name = "dashboard/dashboard.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
mod_settings = ModuleSettings.objects.first()
cash_book_period = mod_settings.cash_book_period
cash_book_in_and_out_report = (
CashBookTransaction
.objects
.cash_book_in_and_out_report(cash_book_period)
)
cash_book_in_and_out = []
for period in cash_book_in_and_out_report:
p = period["period__fy_and_period"]
o = {}
o["period"] = p[4:] + " " + p[:4]
o["in"] = period["total_monies_in"]
o["out"] = period["total_monies_out"]
cash_book_in_and_out.append(o)
context["cash_in_and_out"] = cash_book_in_and_out
owed_to_you = TotalOwedReport(
SaleHeader, SaleMatching).report(mod_settings.sales_period)
owed_by_you = TotalOwedReport(PurchaseHeader, PurchaseMatching).report(
mod_settings.purchases_period)
context["owed_to_you"] = owed_to_you
context["owed_by_you"] = owed_by_you
return context
| [[[28, 47], [4923, 4942]], [[76, 90], [4783, 4797]], [[92, 98], [1507, 1513], [2414, 2420]], [[138, 156], [4582, 4600]], [[186, 187], [1318, 1319], [1329, 1330], [1340, 1341]], [[189, 197], [3130, 3138], [3428, 3436]], [[199, 207], [797, 805], [897, 905], [1119, 1127]], [[209, 212], [1903, 1906], [2769, 2772], [3219, 3222], [3517, 3520]], [[252, 260], [867, 875], [1089, 1097], [1894, 1902], [2760, 2768]], [[294, 306], [4602, 4614]], [[336, 350], [5579, 5593]], [[352, 368], [5595, 5611]], [[394, 404], [5481, 5491]], [[406, 418], [5493, 5505]], [[427, 442], [5452, 5467], [5563, 5578]], [[4572, 4581]]] |
import os
import errno
import certifi
import requests
from deriva.core import urlsplit, get_new_requests_session, stob, make_dirs, DEFAULT_SESSION_CONFIG
from deriva.transfer.download import DerivaDownloadError, DerivaDownloadConfigurationError, \
DerivaDownloadAuthenticationError, DerivaDownloadAuthorizationError
from deriva.transfer.download.processors.base_processor import BaseProcessor, \
LOCAL_PATH_KEY, FILE_SIZE_KEY, SOURCE_URL_KEY
from bdbag import bdbag_ro as ro
class BaseQueryProcessor(BaseProcessor):
"""
Base class for QueryProcessor classes
"""
HEADERS = {'Connection': 'keep-alive'}
def __init__(self, envars=None, **kwargs):
super(BaseQueryProcessor, self).__init__(envars, **kwargs)
self.catalog = kwargs["catalog"]
self.store = kwargs["store"]
self.base_path = kwargs["base_path"]
self.query = self.parameters["query_path"]
if self.envars:
self.query = self.query.format(**self.envars)
self.sub_path = self.parameters.get("output_path")
self.output_filename = self.parameters.get("output_filename")
self.store_base = kwargs.get("store_base", "/hatrac/")
self.is_bag = kwargs.get("bag", False)
self.sessions = kwargs.get("sessions", dict())
self.content_type = "application/octet-stream"
self.url = ''.join([self.catalog.get_server_uri(), self.query])
self.ro_file_provenance = stob(self.parameters.get("ro_file_provenance", False if not self.is_bag else True))
self.ro_manifest = self.kwargs.get("ro_manifest")
self.ro_author_name = self.kwargs.get("ro_author_name")
self.ro_author_orcid = self.kwargs.get("ro_author_orcid")
self.output_relpath = None
self.output_abspath = None
self.paged_query = self.parameters.get("paged_query", False)
self.paged_query_size = self.parameters.get("paged_query_size", 100000)
def process(self):
resp = self.catalogQuery(headers={'accept': self.content_type})
if os.path.isfile(self.output_abspath):
if self.ro_manifest and self.ro_file_provenance:
ro.add_file_metadata(self.ro_manifest,
source_url=self.url,
local_path=self.output_relpath,
media_type=self.content_type,
retrieved_on=ro.make_retrieved_on(),
retrieved_by=ro.make_retrieved_by(self.ro_author_name,
orcid=self.ro_author_orcid),
bundled_as=ro.make_bundled_as())
self.outputs.update({self.output_relpath: {LOCAL_PATH_KEY: self.output_abspath,
FILE_SIZE_KEY: os.path.getsize(self.output_abspath),
SOURCE_URL_KEY: self.url}})
return self.outputs
def catalogQuery(self, headers=None, as_file=True):
if not headers:
headers = self.HEADERS.copy()
else:
headers.update(self.HEADERS)
if as_file:
output_dir = os.path.dirname(self.output_abspath)
make_dirs(output_dir)
try:
if as_file:
return self.catalog.getAsFile(self.query, self.output_abspath,
headers=headers,
delete_if_empty=True,
paged=self.paged_query,
page_size=self.paged_query_size)
else:
return self.catalog.get(self.query, headers=headers).json()
except requests.HTTPError as e:
if e.response.status_code == 401:
raise DerivaDownloadAuthenticationError(e)
if e.response.status_code == 403:
raise DerivaDownloadAuthorizationError(e)
if as_file:
os.remove(self.output_abspath)
raise DerivaDownloadError("Error executing catalog query: %s" % e)
except Exception:
if as_file:
os.remove(self.output_abspath)
raise
def headForHeaders(self, url, raise_for_status=False):
store = self.getHatracStore(url)
if store:
r = store.head(url, headers=self.HEADERS)
if raise_for_status:
r.raise_for_status()
headers = r.headers
else:
url = self.getExternalUrl(url)
session = self.getExternalSession(urlsplit(url).hostname)
r = session.head(url, headers=self.HEADERS)
if raise_for_status:
r.raise_for_status()
headers = r.headers
return headers
def getHatracStore(self, url):
urlparts = urlsplit(url)
if not urlparts.path.startswith(self.store_base):
return None
if url.startswith(self.store_base):
return self.store
else:
serverURI = urlparts.scheme + "://" + urlparts.netloc
if serverURI == self.store.get_server_uri():
return self.store
else:
# do we need to deal with the possibility of a fully qualified URL referencing a different hatrac host?
raise DerivaDownloadConfigurationError(
"Got a reference to a Hatrac server [%s] that is different from the expected Hatrac server: %s" % (
                        serverURI, self.store.get_server_uri()))
def getExternalUrl(self, url):
urlparts = urlsplit(url)
if urlparts.path.startswith(self.store_base):
path_only = url.startswith(self.store_base)
server_uri = urlparts.scheme + "://" + urlparts.netloc
if server_uri == self.store.get_server_uri() or path_only:
url = ''.join([self.store.get_server_uri(), url]) if path_only else url
else:
if not (urlparts.scheme and urlparts.netloc):
urlparts = urlsplit(self.catalog.get_server_uri())
server_uri = urlparts.scheme + "://" + urlparts.netloc
url = ''.join([server_uri, url])
return url
def getExternalSession(self, host):
sessions = self.sessions
auth_params = self.kwargs.get("auth_params", dict())
cookies = auth_params.get("cookies")
auth_url = auth_params.get("auth_url")
login_params = auth_params.get("login_params")
session_config = self.kwargs.get("session_config")
session = sessions.get(host)
if session is not None:
return session
if not session_config:
session_config = DEFAULT_SESSION_CONFIG
session = get_new_requests_session(session_config=session_config)
if cookies:
session.cookies.update(cookies)
if login_params and auth_url:
r = session.post(auth_url, data=login_params, verify=certifi.where())
if r.status_code > 203:
raise DerivaDownloadError(
'GetExternalSession Failed with Status Code: %s\n%s\n' % (r.status_code, r.text))
sessions[host] = session
return session
def create_default_paths(self):
self.output_relpath, self.output_abspath = self.create_paths(self.base_path,
sub_path=self.sub_path,
filename=self.output_filename,
ext=self.ext,
is_bag=self.is_bag,
envars=self.envars)
def __del__(self):
for session in self.sessions.values():
session.close()
class CSVQueryProcessor(BaseQueryProcessor):
def __init__(self, envars=None, **kwargs):
super(CSVQueryProcessor, self).__init__(envars, **kwargs)
self.ext = ".csv"
self.content_type = "text/csv"
self.create_default_paths()
class JSONQueryProcessor(BaseQueryProcessor):
def __init__(self, envars=None, **kwargs):
super(JSONQueryProcessor, self).__init__(envars, **kwargs)
self.ext = ".json"
self.content_type = "application/json"
self.create_default_paths()
class JSONStreamQueryProcessor(BaseQueryProcessor):
def __init__(self, envars=None, **kwargs):
super(JSONStreamQueryProcessor, self).__init__(envars, **kwargs)
self.ext = ".json"
self.content_type = "application/x-json-stream"
self.create_default_paths()
class JSONEnvUpdateProcessor(BaseQueryProcessor):
def __init__(self, envars=None, **kwargs):
super(JSONEnvUpdateProcessor, self).__init__(envars, **kwargs)
def process(self):
resp = self.catalogQuery(headers={'accept': "application/json"}, as_file=False)
if resp:
self.envars.update(resp[0])
self._urlencode_envars()
return {}
| [[[7, 9], [2051, 2053], [2896, 2898], [3269, 3271], [4119, 4121], [4295, 4297]], [[17, 22]], [[30, 37], [7149, 7156]], [[45, 53], [3845, 3853]], [[78, 86], [4722, 4730], [4983, 4991], [5756, 5764], [6205, 6213]], [[88, 112], [6925, 6949]], [[114, 118], [1453, 1457]], [[120, 129], [3318, 3327]], [[131, 153], [6884, 6906]], [[191, 210], [4168, 4187], [7224, 7243]], [[212, 244], [5484, 5516]], [[252, 285], [3938, 3971]], [[287, 319], [4043, 4075]], [[383, 396], [510, 523]], [[404, 418], [2789, 2803]], [[420, 433], [2881, 2894]], [[435, 449], [2989, 3003]], [[468, 482], [2165, 2167], [2448, 2450], [2522, 2524], [2712, 2714]], [[491, 509], [8105, 8123], [8367, 8385], [8645, 8663], [8936, 8954], [689, 707]], [[8087, 8104], [8187, 8204]], [[8348, 8366], [8449, 8467]], [[8620, 8644], [8727, 8751]], [[8913, 8935], [9018, 9040]]] |
from app.routers.audio import router
AUDIO_SETTINGS_URL = router.url_path_for("audio_settings")
GET_CHOICES_URL = router.url_path_for("get_choices")
START_AUDIO_URL = router.url_path_for("start_audio")
def test_get_settings(audio_test_client):
response = audio_test_client.get(url=AUDIO_SETTINGS_URL)
assert response.ok
assert b"Audio Settings" in response.content
def test_start_audio_default(audio_test_client):
response = audio_test_client.get(START_AUDIO_URL)
assert response.ok
def test_choices_Off(audio_test_client):
data = {"music_on": False, "sfx_on": False}
response = audio_test_client.post(url=GET_CHOICES_URL, data=data)
assert response.ok
def test_choices_On(audio_test_client):
data = {
"music_on": True,
"music_choices": ["GASTRONOMICA.mp3"],
"music_vol": 50,
"sfx_on": True,
"sfx_choice": "click_1.wav",
"sfx_vol": 50,
}
response = audio_test_client.post(url=GET_CHOICES_URL, data=data)
assert response.ok
def test_start_audio(audio_test_client):
data = {
"music_on": True,
"music_choices": ["GASTRONOMICA.mp3"],
"music_vol": 50,
"sfx_on": True,
"sfx_choice": "click_1.wav",
"sfx_vol": 50,
}
audio_test_client.post(url=GET_CHOICES_URL, data=data)
response = audio_test_client.get(url=START_AUDIO_URL)
assert response.ok
def test_start_audio_sfx_off(audio_test_client):
data = {"music_on_off": "Off", "sfx_on_off": "Off"}
audio_test_client.post(url=GET_CHOICES_URL, data=data)
response = audio_test_client.get(url=START_AUDIO_URL)
assert response.ok
| [[[30, 36], [59, 65], [115, 121], [168, 174]], [[38, 56], [288, 306]], [[97, 112], [641, 656], [977, 992], [1303, 1318], [1550, 1565]], [[150, 165], [468, 483], [1372, 1387], [1619, 1634]], [[209, 226]], [[386, 410]], [[514, 530]], [[698, 713]], [[1034, 1050]], [[1418, 1442]]] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Enrique Fernandez
# Released under the BSD License.
#
# Authors:
# * Enrique Fernandez
import Tkinter
import rospy
from geometry_msgs.msg import Twist, Vector3
import numpy
class MouseTeleop():
def __init__(self):
# Retrieve params:
self._frequency = rospy.get_param('~frequency', 0.0)
self._scale = rospy.get_param('~scale', 1.0)
self._holonomic = rospy.get_param('~holonomic', False)
# Create twist publisher:
self._pub_cmd = rospy.Publisher('mouse_vel', Twist, queue_size=100)
# Initialize twist components to zero:
self._v_x = 0.0
self._v_y = 0.0
self._w = 0.0
# Initialize mouse position (x, y) to None (unknown); it's initialized
# when the mouse button is pressed on the _start callback that handles
# that event:
self._x = None
self._y = None
# Create window:
self._root = Tkinter.Tk()
self._root.title('Mouse Teleop')
# Make window non-resizable:
self._root.resizable(0, 0)
# Create canvas:
self._canvas = Tkinter.Canvas(self._root, bg='white')
# Create canvas objects:
self._canvas.create_arc(0, 0, 0, 0, fill='red', outline='red',
width=1, style=Tkinter.PIESLICE, start=90.0, tag='w')
self._canvas.create_line(0, 0, 0, 0, fill='blue', width=4, tag='v_x')
if self._holonomic:
self._canvas.create_line(0, 0, 0, 0,
fill='blue', width=4, tag='v_y')
# Create canvas text objects:
self._text_v_x = Tkinter.StringVar()
if self._holonomic:
self._text_v_y = Tkinter.StringVar()
self._text_w = Tkinter.StringVar()
self._label_v_x = Tkinter.Label(self._root,
anchor=Tkinter.W, textvariable=self._text_v_x)
if self._holonomic:
self._label_v_y = Tkinter.Label(self._root,
anchor=Tkinter.W, textvariable=self._text_v_y)
self._label_w = Tkinter.Label(self._root,
anchor=Tkinter.W, textvariable=self._text_w)
if self._holonomic:
self._text_v_x.set('v_x = %0.2f m/s' % self._v_x)
self._text_v_y.set('v_y = %0.2f m/s' % self._v_y)
self._text_w.set( 'w = %0.2f deg/s' % self._w)
else:
self._text_v_x.set('v = %0.2f m/s' % self._v_x)
self._text_w.set( 'w = %0.2f deg/s' % self._w)
self._label_v_x.pack()
if self._holonomic:
self._label_v_y.pack()
self._label_w.pack()
# Bind event handlers:
self._canvas.bind('<Button-1>', self._start)
self._canvas.bind('<ButtonRelease-1>', self._release)
self._canvas.bind('<Configure>', self._configure)
if self._holonomic:
self._canvas.bind('<B1-Motion>', self._mouse_motion_linear)
self._canvas.bind('<Shift-B1-Motion>', self._mouse_motion_angular)
self._root.bind('<Shift_L>', self._change_to_motion_angular)
self._root.bind('<KeyRelease-Shift_L>',
self._change_to_motion_linear)
else:
self._canvas.bind('<B1-Motion>', self._mouse_motion_angular)
self._canvas.pack()
# If frequency is positive, use synchronous publishing mode:
if self._frequency > 0.0:
# Create timer for the given frequency to publish the twist:
period = rospy.Duration(1.0 / self._frequency)
self._timer = rospy.Timer(period, self._publish_twist)
# Start window event manager main loop:
self._root.mainloop()
def __del__(self):
if self._frequency > 0.0:
self._timer.shutdown()
self._root.quit()
def _start(self, event):
self._x, self._y = event.y, event.x
self._y_linear = self._y_angular = 0
self._v_x = self._v_y = self._w = 0.0
def _release(self, event):
self._v_x = self._v_y = self._w = 0.0
self._send_motion()
def _configure(self, event):
self._width, self._height = event.height, event.width
self._c_x = self._height / 2.0
self._c_y = self._width / 2.0
self._r = min(self._height, self._width) * 0.25
def _mouse_motion_linear(self, event):
self._v_x, self._v_y = self._relative_motion(event.y, event.x)
self._send_motion()
def _mouse_motion_angular(self, event):
self._v_x, self._w = self._relative_motion(event.y, event.x)
self._send_motion()
def _update_coords(self, tag, x0, y0, x1, y1):
x0 += self._c_x
y0 += self._c_y
x1 += self._c_x
y1 += self._c_y
self._canvas.coords(tag, (x0, y0, x1, y1))
def _draw_v_x(self, v):
x = -v * float(self._width)
self._update_coords('v_x', 0, 0, 0, x)
def _draw_v_y(self, v):
y = -v * float(self._height)
self._update_coords('v_y', 0, 0, y, 0)
def _draw_w(self, w):
x0 = y0 = -self._r
x1 = y1 = self._r
self._update_coords('w', x0, y0, x1, y1)
yaw = w * numpy.rad2deg(self._scale)
self._canvas.itemconfig('w', extent=yaw)
def _send_motion(self):
v_x = self._v_x * self._scale
v_y = self._v_y * self._scale
w = self._w * self._scale
linear = Vector3(v_x, v_y, 0.0)
angular = Vector3(0.0, 0.0, w)
self._draw_v_x(self._v_x)
if self._holonomic:
self._draw_v_y(self._v_y)
self._draw_w(self._w)
if self._holonomic:
self._text_v_x.set('v_x = %0.2f m/s' % self._v_x)
self._text_v_y.set('v_y = %0.2f m/s' % self._v_y)
self._text_w.set( 'w = %0.2f deg/s' % numpy.rad2deg(self._w))
else:
self._text_v_x.set('v = %0.2f m/s' % self._v_x)
self._text_w.set( 'w = %0.2f deg/s' % numpy.rad2deg(self._w))
twist = Twist(linear, angular)
self._pub_cmd.publish(twist)
def _publish_twist(self, event):
self._send_motion()
def _relative_motion(self, x, y):
dx = self._x - x
dy = self._y - y
dx /= float(self._width)
dy /= float(self._height)
dx = max(-1.0, min(dx, 1.0))
dy = max(-1.0, min(dy, 1.0))
return dx, dy
def _change_to_motion_linear(self, event):
if self._y is not None:
y = event.x
self._y_angular = self._y - y
self._y = self._y_linear + y
def _change_to_motion_angular(self, event):
if self._y is not None:
y = event.x
self._y_linear = self._y - y
self._y = self._y_angular + y
def main():
rospy.init_node('mouse_teleop')
MouseTeleop()
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
| [[[167, 174], [1004, 1011], [1180, 1187], [1355, 1362], [1667, 1674], [1744, 1751], [1789, 1796], [1836, 1843], [1885, 1892], [1983, 1990], [2036, 2043], [2100, 2107], [2149, 2156]], [[183, 188], [6948, 6953], [348, 353], [405, 410], [462, 467], [558, 563], [3537, 3542], [3602, 3607], [6834, 6839]], [[219, 224], [587, 592], [6043, 6048]], [[226, 233], [5452, 5459], [5493, 5500]], [[242, 247], [5213, 5218], [5853, 5858], [6002, 6007]], [[256, 267], [6871, 6882]], [[6822, 6826], [6930, 6934]]] |
from typing import Tuple, Union, Callable, Optional, Sequence
from pytest_mock import MockerFixture
import pytest
import numpy as np
import dask.array as da
from squidpy.im import (
segment,
ImageContainer,
SegmentationCustom,
SegmentationWatershed,
)
from squidpy.im._segment import _SEG_DTYPE
from squidpy._constants._constants import SegmentationBackend
from squidpy._constants._pkg_constants import Key
def dummy_segment(arr: np.ndarray) -> np.ndarray:
assert isinstance(arr, np.ndarray)
assert arr.ndim == 3
return arr[..., 0].astype(np.uint32)
class TestGeneral:
@pytest.mark.parametrize("ndim", [2, 3])
def test_input_ndim(self, ndim: int):
img = np.zeros(shape=(10, 10))
if ndim == 3:
img = img[..., np.newaxis]
sc = SegmentationCustom(dummy_segment)
res = sc.segment(img)
assert isinstance(res, np.ndarray)
assert res.ndim == 3
if ndim == 2:
assert res.shape == img.shape + (1,)
else:
assert res.shape == img.shape
def test_segment_invalid_shape(self):
img = np.zeros(shape=(1, 10, 10, 2))
sc = SegmentationCustom(dummy_segment)
with pytest.raises(ValueError, match=r"Expected `2` or `3` dimensions"):
sc.segment(img)
def test_segment_container(self):
img = ImageContainer(np.zeros(shape=(10, 10, 1)), layer="image")
sc = SegmentationCustom(dummy_segment)
res = sc.segment(img, layer="image", library_id=img["image"].z.values[0])
assert isinstance(res, ImageContainer)
assert res.shape == img.shape
assert "image" in res
assert res["image"].dims == img["image"].dims
class TestWatershed:
@pytest.mark.parametrize("thresh", [None, 0.1, 0.5, 1.0])
def test_threshold(self, thresh: Optional[float], mocker: MockerFixture):
img = np.zeros((100, 200), dtype=np.float64)
img[2:10, 2:10] = 1.0
img[30:34, 10:16] = 1.0
img = ImageContainer(img, layer="image")
sw = SegmentationWatershed()
spy = mocker.spy(sw, "_segment")
res = sw.segment(img, layer="image", library_id=img["image"].z.values[0], fn_kwargs={"thresh": thresh})
assert isinstance(res, ImageContainer)
spy.assert_called_once()
call = spy.call_args_list[0]
assert call[1]["thresh"] == thresh
class TestHighLevel:
def test_invalid_layer(self, small_cont: ImageContainer):
with pytest.raises(KeyError, match=r"Image layer `foobar` not found in"):
segment(small_cont, layer="foobar")
@pytest.mark.parametrize("method", ["watershed", dummy_segment])
def test_method(self, small_cont: ImageContainer, method: Union[str, Callable]):
res = segment(small_cont, method=method, copy=True)
assert isinstance(res, ImageContainer)
assert res.shape == small_cont.shape
if callable(method):
method = SegmentationBackend.CUSTOM.s
assert Key.img.segment(method) in res
        if method in ("log", "dog", "doh"):
assert res[Key.img.segment(method)].values.max() <= 1
@pytest.mark.parametrize("dy", [11, 0.5, None])
@pytest.mark.parametrize("dx", [15, 0.1, None])
def test_size(self, small_cont: ImageContainer, dy: Optional[Union[int, float]], dx: Optional[Union[int, float]]):
res = segment(small_cont, size=(dy, dx), copy=True)
assert isinstance(res, ImageContainer)
assert res.shape == small_cont.shape
@pytest.mark.parametrize("channel", [0, 1, 2])
def test_channel(self, small_cont: ImageContainer, channel: int):
segment(small_cont, copy=False, layer="image", channel=channel)
assert Key.img.segment("watershed") in small_cont
np.testing.assert_array_equal(
list(small_cont[Key.img.segment("watershed")].dims),
["y", "x", "z", f"{small_cont['image'].dims[-1]}:{channel}"],
)
def test_all_channels(self, small_cont: ImageContainer):
def func(arr: np.ndarray):
assert arr.shape == (small_cont.shape + (n_channels,))
return np.zeros(arr.shape[:2], dtype=np.uint8)
n_channels = small_cont["image"].sizes["channels"]
segment(small_cont, copy=False, layer="image", channel=None, method=func, layer_added="seg")
np.testing.assert_array_equal(small_cont["seg"], np.zeros(small_cont.shape + (1, 1)))
assert small_cont["seg"].dtype == _SEG_DTYPE
@pytest.mark.parametrize("key_added", [None, "foo"])
def test_key_added(self, small_cont: ImageContainer, key_added: Optional[str]):
res = segment(small_cont, copy=False, layer="image", layer_added=key_added)
assert res is None
assert Key.img.segment("watershed", layer_added=key_added) in small_cont
def test_passing_kwargs(self, small_cont: ImageContainer):
def func(chunk: np.ndarray, sentinel: bool = False):
assert sentinel, "Sentinel not set."
return np.zeros(chunk[..., 0].shape, dtype=_SEG_DTYPE)
segment(
small_cont, method=func, layer="image", layer_added="bar", chunks=25, lazy=False, depth=None, sentinel=True
)
assert small_cont["bar"].values.dtype == _SEG_DTYPE
np.testing.assert_array_equal(small_cont["bar"].values, 0)
@pytest.mark.parametrize("dask_input", [False, True])
@pytest.mark.parametrize("chunks", [25, (50, 50, 1), "auto"])
@pytest.mark.parametrize("lazy", [False, True])
def test_dask_segment(
self, small_cont: ImageContainer, dask_input: bool, chunks: Union[int, Tuple[int, ...], str], lazy: bool
):
def func(chunk: np.ndarray):
if isinstance(chunks, tuple):
np.testing.assert_array_equal(chunk.shape, [chunks[0] + 2 * d, chunks[1] + 2 * d, 1])
elif isinstance(chunks, int):
np.testing.assert_array_equal(chunk.shape, [chunks + 2 * d, chunks + 2 * d, 1])
return np.zeros(chunk[..., 0].shape, dtype=_SEG_DTYPE)
small_cont["foo"] = da.asarray(small_cont["image"].data) if dask_input else small_cont["image"].values
d = 10 # overlap depth
assert isinstance(small_cont["foo"].data, da.Array if dask_input else np.ndarray)
segment(small_cont, method=func, layer="foo", layer_added="bar", chunks=chunks, lazy=lazy, depth={0: d, 1: d})
if lazy:
assert isinstance(small_cont["bar"].data, da.Array)
small_cont.compute()
assert isinstance(small_cont["foo"].data, np.ndarray)
else:
# make sure we didn't accidentally trigger foo's computation
assert isinstance(small_cont["foo"].data, da.Array if dask_input else np.ndarray)
assert isinstance(small_cont["bar"].data, np.ndarray)
assert small_cont["bar"].values.dtype == _SEG_DTYPE
np.testing.assert_array_equal(small_cont["bar"].values, 0)
def test_copy(self, small_cont: ImageContainer):
prev_keys = set(small_cont)
res = segment(small_cont, copy=True, layer="image")
assert isinstance(res, ImageContainer)
assert set(small_cont) == prev_keys
assert Key.img.segment("watershed") in res
def test_parallelize(self, small_cont: ImageContainer):
res1 = segment(small_cont, layer="image", n_jobs=1, copy=True)
res2 = segment(small_cont, layer="image", n_jobs=2, copy=True)
np.testing.assert_array_equal(
res1[Key.img.segment("watershed")].values, res2[Key.img.segment("watershed")].values
)
@pytest.mark.parametrize("chunks", [25, 50])
def test_blocking(self, small_cont: ImageContainer, chunks: int):
def func(chunk: np.ndarray):
labels = np.zeros(chunk[..., 0].shape, dtype=np.uint32)
labels[0, 0] = 1
return labels
segment(small_cont, method=func, layer="image", layer_added="bar", chunks=chunks, lazy=False, depth=None)
        # blocks are labeled from top-left to bottom-right in ascending order [0, num_blocks - 1]
        # the lowest n bits are allocated for the block id, the rest for the label (i.e. for blocksize=25,
        # we need 16 block ids in [0, 15], which fit in 4 bits; we then prepend 1 bit (see `func` above),
        # resulting in 16 unique labels [10000, 11111])
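        # worked example (editor's illustration): chunks=25 on this 100x100 image
        # gives 4 * 4 = 16 blocks; ids 0..15 fit in 4 bits, and prepending the 1 bit
        # yields labels 0b10000=16 .. 0b11111=31, hence `start = 16` below
        # (for chunks=50: 4 blocks, labels 0b100=4 .. 0b111=7, hence `start = 4`)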
expected = np.zeros_like(small_cont["bar"].values)
start = 16 if chunks == 25 else 4
for i in range(0, 100, chunks):
for j in range(0, 100, chunks):
expected[i, j] = start
start += 1
assert small_cont["bar"].values.dtype == _SEG_DTYPE
np.testing.assert_array_equal(small_cont["bar"].values, expected)
@pytest.mark.parametrize("size", [None, 11])
def test_watershed_works(self, size: Optional[int]):
img_orig = np.zeros((100, 200, 30), dtype=np.float64)
img_orig[2:10, 2:10] = 1.0
img_orig[30:34, 10:16] = 1.0
cont = ImageContainer(img_orig, layer="image_0")
segment(
img=cont,
method="watershed",
layer="image_0",
layer_added="segment",
size=size,
channel=0,
thresh=0.5,
)
# check that blobs are in segments
assert np.mean(cont.data["segment"].values[img_orig[:, :, 0] > 0] > 0) > 0.5
        # for size=10, this "fails with `size=10` due to border effects";
        # there is no test for it because inside tox it "works" (i.e. the assertion passes),
        # but outside tox the assertion fails, as it should
@pytest.mark.parametrize("library_id", [None, "3", ["1", "2"]])
def test_library_id(self, cont_4d: ImageContainer, library_id: Optional[Union[str, Sequence[str]]]):
def func(arr: np.ndarray):
assert arr.shape == cont_4d.shape + (1,)
return np.ones(arr[..., 0].shape, dtype=_SEG_DTYPE)
segment(cont_4d, method=func, layer="image", layer_added="image_seg", library_id=library_id, copy=False)
np.testing.assert_array_equal(cont_4d["image"].coords, cont_4d["image_seg"].coords)
if library_id is None:
np.testing.assert_array_equal(1, cont_4d["image_seg"])
else:
if isinstance(library_id, str):
library_id = [library_id]
for lid in library_id:
np.testing.assert_array_equal(1, cont_4d["image_seg"].sel(z=lid))
for lid in set(cont_4d.library_ids) - set(library_id):
# channels have been changed, apply sets to 0
np.testing.assert_array_equal(0, cont_4d["image_seg"].sel(z=lid))
| [[[19, 24], [5651, 5656]], [[26, 31], [2753, 2758], [3337, 3342], [3370, 3375], [5640, 5645], [9804, 9809]], [[33, 41], [2764, 2772]], [[43, 51], [1847, 1855], [3328, 3336], [3361, 3369], [4643, 4651], [8877, 8885], [9795, 9803]], [[53, 61], [9815, 9823]], [[86, 99], [1872, 1885]], [[107, 113], [608, 614], [1753, 1759], [2627, 2633], [3173, 3179], [3225, 3231], [3550, 3556], [4523, 4529], [5374, 5380], [5432, 5438], [5498, 5504], [7634, 7640], [8792, 8798], [9665, 9671], [1217, 1223], [2504, 2510]], [[122, 133], [465, 467], [450, 452], [504, 506], [571, 573], [704, 706], [778, 780], [900, 902], [1125, 1127], [1381, 1383], [1902, 1904], [1929, 1931], [3805, 3807], [4378, 4380], [4427, 4429], [5309, 5311], [6301, 6303], [6602, 6604], [6783, 6785], [6846, 6848], [6926, 6928], [7490, 7492], [8419, 8421], [8720, 8722], [8912, 8914], [8943, 8945], [9358, 9360], [10108, 10110], [10235, 10237], [10441, 10443], [10652, 10654], [4069, 4071], [4168, 4170], [4198, 4200], [4940, 4942], [5045, 5047], [5716, 5718], [5787, 5789], [5931, 5933], [6031, 6033], [7772, 7774], [7806, 7808], [7842, 7844], [9855, 9857], [9940, 9942]], [[141, 157], [6108, 6110], [6273, 6275], [6505, 6507], [6755, 6757]], [[188, 195], [2585, 2592], [2790, 2797], [3405, 3412], [3674, 3681], [4276, 4283], [4673, 4680], [5102, 5109], [6322, 6329], [7089, 7096], [7354, 7361], [7425, 7432], [7917, 7924], [9093, 9100], [9994, 10001]], [[201, 215], [1366, 1380], [1587, 1601], [2017, 2031], [2276, 2290], [2474, 2488], [2729, 2743], [2868, 2882], [3308, 3322], [3483, 3497], [3635, 3649], [4030, 4044], [4616, 4630], [4899, 4913], [5598, 5612], [7022, 7036], [7167, 7181], [7322, 7336], [7718, 7732], [9043, 9057], [9767, 9781]], [[221, 239], [803, 821], [1169, 1187], [1438, 1456]], [[245, 266], [2066, 2087]], [[302, 312], [4506, 4516], [5290, 5300], [6907, 6917], [8701, 8711], [5081, 5091], [6067, 6077], [9973, 9983]], [[355, 374], [2980, 2999]], [[421, 424], [3025, 3028], [3124, 3127], [3754, 3757], [3864, 3867], [4786, 4789], [7242, 7245], [7538, 7541], [7581, 7584]], [[431, 444], [2675, 2688], [822, 835], [1188, 1201], [1457, 1470]], [[590, 601]], [[1733, 1746]], [[2414, 2427]]] |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import argparse
import contextlib
import os
import subprocess
from pathlib import Path
RESTLER_TEMP_DIR = 'restler_working_dir'
@contextlib.contextmanager
def usedir(dir):
""" Helper for 'with' statements that changes the current directory to
@dir and then changes the directory back to its original once the 'with' ends.
Can be thought of like pushd with an auto popd after the 'with' scope ends
"""
curr = os.getcwd()
os.chdir(dir)
try:
yield
finally:
os.chdir(curr)
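# Illustrative usage of usedir (editor's sketch, not part of the original script):
#
#     with usedir(RESTLER_TEMP_DIR):
#         subprocess.run("dotnet --info", shell=True)  # runs inside the working dir
#     # back in the original directory once the 'with' block exits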
def compile_spec(api_spec_path, restler_dll_path):
""" Compiles a specified api spec
@param api_spec_path: The absolute path to the Swagger file to compile
@type api_spec_path: Str
@param restler_dll_path: The absolute path to the RESTler driver's dll
@type restler_dll_path: Str
@return: None
@rtype : None
"""
if not os.path.exists(RESTLER_TEMP_DIR):
os.makedirs(RESTLER_TEMP_DIR)
with usedir(RESTLER_TEMP_DIR):
command=f"dotnet \"{restler_dll_path}\" compile --api_spec \"{api_spec_path}\""
print(f"command: {command}")
subprocess.run(command, shell=True)
def add_common_settings(ip, port, host, use_ssl, command):
if not use_ssl:
command = f"{command} --no_ssl"
if ip is not None:
command = f"{command} --target_ip {ip}"
if port is not None:
command = f"{command} --target_port {port}"
if host is not None:
command = f"{command} --host {host}"
return command
def replay_bug(ip, port, host, use_ssl, restler_dll_path, replay_log):
""" Runs RESTler's replay mode on the specified replay file
"""
with usedir(RESTLER_TEMP_DIR):
command = (
f"dotnet \"{restler_dll_path}\" replay --replay_log \"{replay_log}\""
)
command = add_common_settings(ip, port, host, use_ssl, command)
print(f"command: {command}\n")
subprocess.run(command, shell=True)
def replay_from_dir(ip, port, host, use_ssl, restler_dll_path, replay_dir):
    import glob
# get all the 500 replay files in the bug buckets directory
bug_buckets = glob.glob(os.path.join(replay_dir, 'RestlerResults', '**/bug_buckets/*500*'))
print(f"buckets: {bug_buckets}")
for file_path in bug_buckets:
if "bug_buckets" in os.path.basename(file_path):
continue
print(f"Testing replay file: {file_path}")
replay_bug(ip, port, host, use_ssl, restler_dll_path, Path(file_path).absolute())
def test_spec(ip, port, host, use_ssl, restler_dll_path, task):
""" Runs RESTler's test mode on a specified Compile directory
@param ip: The IP of the service to test
@type ip: Str
@param port: The port of the service to test
@type port: Str
@param host: The hostname of the service to test
@type host: Str
@param use_ssl: If False, set the --no_ssl parameter when executing RESTler
@type use_ssl: Boolean
@param restler_dll_path: The absolute path to the RESTler driver's dll
@type restler_dll_path: Str
@return: None
@rtype : None
"""
import json
with usedir(RESTLER_TEMP_DIR):
        compile_dir = Path('Compile')
grammar_file_path = compile_dir.joinpath('grammar.py')
dictionary_file_path = compile_dir.joinpath('dict.json')
settings_file_path = compile_dir.joinpath('engine_settings.json')
command = (
f"dotnet \"{restler_dll_path}\" {task} --grammar_file \"{grammar_file_path}\" --dictionary_file \"{dictionary_file_path}\""
f" --settings \"{settings_file_path}\""
)
print(f"command: {command}\n")
command = add_common_settings(ip, port, host, use_ssl, command)
subprocess.run(command, shell=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--api_spec_path',
help='The API Swagger specification to compile and test',
type=str, required=False, default=None)
parser.add_argument('--ip',
help='The IP of the service to test',
type=str, required=False, default=None)
parser.add_argument('--port',
help='The port of the service to test',
type=str, required=False, default=None)
parser.add_argument('--restler_drop_dir',
help="The path to the RESTler drop",
type=str, required=True)
parser.add_argument('--use_ssl',
help='Set this flag if you want to use SSL validation for the socket',
action='store_true')
parser.add_argument('--host',
help='The hostname of the service to test',
type=str, required=False, default=None)
parser.add_argument('--task',
help='The task to run (test, fuzz-lean, fuzz, or replay)'
'For test, fuzz-lean, and fuzz, the spec is compiled first.'
'For replay, bug buckets from the specified task directory are re-played.',
type=str, required=False, default='test')
parser.add_argument('--replay_bug_buckets_dir',
help='For the replay task, specifies the directory in which to search for bug buckets.',
type=str, required=False, default=None)
args = parser.parse_args()
restler_dll_path = Path(os.path.abspath(args.restler_drop_dir)).joinpath('restler', 'Restler.dll')
print(f"\nrestler_dll_path: {restler_dll_path}\n")
if args.task == "replay":
replay_from_dir(args.ip, args.port, args.host, args.use_ssl, restler_dll_path.absolute(), args.replay_bug_buckets_dir)
else:
if args.api_spec_path is None:
print("api_spec_path is required for all tasks except the replay task.")
exit(-1)
api_spec_path = os.path.abspath(args.api_spec_path)
compile_spec(api_spec_path, restler_dll_path.absolute())
test_spec(args.ip, args.port, args.host, args.use_ssl, restler_dll_path.absolute(), args.task)
print(f"Test complete.\nSee {os.path.abspath(RESTLER_TEMP_DIR)} for results.")
| [[[80, 88], [3926, 3934]], [[96, 106], [204, 214]], [[114, 116], [5626, 5628], [6093, 6095], [6331, 6333], [504, 506], [520, 522], [578, 580], [954, 956], [996, 998], [2248, 2250], [2415, 2417]], [[124, 134], [1195, 1205], [1998, 2008], [3848, 3858]], [[155, 159], [5621, 5625], [3290, 3294]], [[161, 177], [6347, 6363], [969, 985], [1008, 1024], [1043, 1059], [1748, 1764], [3249, 3265]], [[234, 240], [1036, 1042], [1741, 1747], [3242, 3248]], [[598, 610], [6137, 6149]], [[1236, 1255], [1897, 1916], [3785, 3804]], [[1593, 1603], [2524, 2534]], [[2039, 2054], [5795, 5810]], [[2620, 2629], [6202, 6211]], [[3917, 3923], [3956, 3962], [4145, 4151], [4303, 4309], [4465, 4471], [4621, 4627], [4798, 4804], [4964, 4970], [5341, 5347], [5578, 5584]], [[5571, 5575], [5642, 5646], [5764, 5768], [5811, 5815], [5820, 5824], [5831, 5835], [5842, 5846], [5885, 5889], [5935, 5939], [6109, 6113], [6212, 6216], [6221, 6225], [6232, 6236], [6243, 6247], [6286, 6290]], [[5602, 5618], [5734, 5750], [5856, 5872], [6165, 6181], [6257, 6273]], [[6077, 6090], [6150, 6163]]] |
import urllib
from contextlib import suppress
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import login
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from django.http import FileResponse, Http404, HttpResponseServerError
from django.shortcuts import redirect
from django.template import TemplateDoesNotExist, loader
from django.urls import get_callable
from django.utils.http import url_has_allowed_host_and_scheme
from django.utils.timezone import now
from django.views.generic import FormView
from django.views.generic.detail import SingleObjectTemplateResponseMixin
from django.views.generic.edit import ModelFormMixin, ProcessFormView
from django_context_decorator import context
from pretalx.cfp.forms.auth import ResetForm
from pretalx.common.mail import SendMailException
from pretalx.common.phrases import phrases
from pretalx.person.forms import UserForm
from pretalx.person.models import User
class CreateOrUpdateView(
SingleObjectTemplateResponseMixin, ModelFormMixin, ProcessFormView
):
def set_object(self):
if getattr(self, "object", None) is None:
setattr(self, "object", None)
with suppress(self.model.DoesNotExist, AttributeError):
setattr(self, "object", self.get_object())
def get(self, request, *args, **kwargs):
self.set_object()
return super().get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.set_object()
return super().post(request, *args, **kwargs)
def is_form_bound(request, form_name, form_param="form"):
return request.method == "POST" and request.POST.get(form_param) == form_name
def get_static(request, path, content_type): # pragma: no cover
"""TODO: move to staticfiles usage as per https://gist.github.com/SmileyChris/8d472f2a67526e36f39f3c33520182bc
This would avoid potential directory traversal by … a malicious urlconfig, so not a huge attack vector."""
path = settings.BASE_DIR / "pretalx/static" / path
if not path.exists():
raise Http404()
return FileResponse(
open(path, "rb"), content_type=content_type, as_attachment=False
)
class GenericLoginView(FormView):
form_class = UserForm
@context
def password_reset_link(self):
return self.get_password_reset_link()
def dispatch(self, request, *args, **kwargs):
if not self.request.user.is_anonymous:
return redirect(self.get_success_url())
return super().dispatch(request, *args, **kwargs)
def get_success_url(self):
params = self.request.GET.copy()
url = urllib.parse.unquote(params.pop("next", [""])[0])
params = "?" + params.urlencode() if params else ""
if url and url_has_allowed_host_and_scheme(url, allowed_hosts=None):
return url + params
return self.success_url + params
def form_valid(self, form):
pk = form.save()
user = User.objects.filter(pk=pk).first()
login(self.request, user, backend="django.contrib.auth.backends.ModelBackend")
return redirect(self.get_success_url())
class GenericResetView(FormView):
form_class = ResetForm
def form_valid(self, form):
user = form.cleaned_data["user"]
if not user or (
user.pw_reset_time
and (now() - user.pw_reset_time).total_seconds() < 3600 * 24
):
messages.success(self.request, phrases.cfp.auth_password_reset)
return redirect(self.get_success_url())
try:
user.reset_password(
event=getattr(self.request, "event", None),
orga="orga" in self.request.resolver_match.namespaces,
)
except SendMailException: # pragma: no cover
messages.error(self.request, phrases.base.error_sending_mail)
return self.get(self.request, *self.args, **self.kwargs)
messages.success(self.request, phrases.cfp.auth_password_reset)
user.log_action("pretalx.user.password.reset")
return redirect(self.get_success_url())
def handle_500(request):
try:
template = loader.get_template("500.html")
except TemplateDoesNotExist: # pragma: no cover
return HttpResponseServerError(
"Internal server error. Please contact the administrator for details.",
content_type="text/html",
)
context = {}
try: # This should never fail, but can't be too cautious in error views
context["request_path"] = urllib.parse.quote(request.path)
except Exception: # pragma: no cover
pass
return HttpResponseServerError(template.render(context))
def error_view(status_code):
if status_code == 4031:
return get_callable(settings.CSRF_FAILURE_VIEW)
if status_code == 500:
return handle_500
exceptions = {
400: SuspiciousOperation,
403: PermissionDenied,
404: Http404,
}
exception = exceptions[status_code]
def error_view(request, *args, **kwargs):
raise exception
return error_view
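# Illustrative wiring (editor's assumption, not shown in this module): a Django
# ROOT_URLCONF would typically route error handling through these factories, e.g.
#     handler404 = error_view(404)  # raises Http404 -> Django's standard 404 flow
#     handler500 = error_view(500)  # returns handle_500 directly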
| [[[7, 13], [2668, 2674], [4583, 4589]], [[37, 45], [1214, 1222]], [[71, 79], [2018, 2026], [4819, 4827]], [[107, 115], [3461, 3469], [3835, 3843], [3975, 3983]], [[148, 153], [3044, 3049]], [[189, 205], [4966, 4982]], [[207, 226], [4932, 4951]], [[251, 263], [2123, 2135]], [[265, 272], [2102, 2109], [4997, 5004]], [[274, 297], [4298, 4321], [4682, 4705]], [[327, 335], [2490, 2498], [3138, 3146], [3544, 3552], [4110, 4118]], [[364, 384], [4241, 4261]], [[386, 392], [4198, 4204]], [[417, 429], [4806, 4818]], [[460, 491], [2797, 2828]], [[526, 529], [3382, 3385]], [[563, 571], [2241, 2249], [3196, 3204]], [[612, 645], [1013, 1046]], [[684, 698], [1048, 1062]], [[700, 715], [1064, 1079]], [[753, 760], [2284, 2291]], [[797, 806], [3224, 3233]], [[839, 856], [3784, 3801]], [[892, 899], [3492, 3499], [3864, 3871], [4006, 4013]], [[933, 941], [2269, 2277]], [[976, 980], [3001, 3005]], [[989, 1007]], [[1578, 1591]], [[1720, 1730]], [[2224, 2240]], [[3179, 3195]], [[4149, 4159], [4889, 4899]], [[4738, 4748]]] |
# Generated by Django 2.1 on 2019-10-12 09:44
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Stock',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ticker', models.CharField(max_length=10)),
('open', models.FloatField()),
('close', models.FloatField()),
('volume', models.IntegerField()),
],
),
]
| [[[69, 79], [106, 116], [205, 215]], [[81, 87], [299, 305], [418, 424], [477, 483], [525, 531], [574, 580]], [[96, 105]]] |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for image utils."""
import errno
import math
import cryptography
import ddt
import mock
from oslo_concurrency import processutils
from oslo_utils import units
from six.moves import builtins
from cinder import exception
from cinder.image import image_utils
from cinder import test
from cinder.tests.unit import fake_constants as fake
from cinder.volume import throttling
class TestQemuImgInfo(test.TestCase):
@mock.patch('os.name', new='posix')
@mock.patch('oslo_utils.imageutils.QemuImgInfo')
@mock.patch('cinder.utils.execute')
def test_qemu_img_info(self, mock_exec, mock_info):
mock_out = mock.sentinel.out
mock_err = mock.sentinel.err
test_path = mock.sentinel.path
mock_exec.return_value = (mock_out, mock_err)
output = image_utils.qemu_img_info(test_path)
mock_exec.assert_called_once_with('env', 'LC_ALL=C', 'qemu-img',
'info', test_path, run_as_root=True,
prlimit=image_utils.QEMU_IMG_LIMITS)
self.assertEqual(mock_info.return_value, output)
@mock.patch('os.name', new='posix')
@mock.patch('oslo_utils.imageutils.QemuImgInfo')
@mock.patch('cinder.utils.execute')
def test_qemu_img_info_not_root(self, mock_exec, mock_info):
mock_out = mock.sentinel.out
mock_err = mock.sentinel.err
test_path = mock.sentinel.path
mock_exec.return_value = (mock_out, mock_err)
output = image_utils.qemu_img_info(test_path,
force_share=False,
run_as_root=False)
mock_exec.assert_called_once_with('env', 'LC_ALL=C', 'qemu-img',
'info', test_path, run_as_root=False,
prlimit=image_utils.QEMU_IMG_LIMITS)
self.assertEqual(mock_info.return_value, output)
@mock.patch('cinder.image.image_utils.os')
@mock.patch('oslo_utils.imageutils.QemuImgInfo')
@mock.patch('cinder.utils.execute')
def test_qemu_img_info_on_nt(self, mock_exec, mock_info, mock_os):
mock_out = mock.sentinel.out
mock_err = mock.sentinel.err
test_path = mock.sentinel.path
mock_exec.return_value = (mock_out, mock_err)
mock_os.name = 'nt'
output = image_utils.qemu_img_info(test_path)
mock_exec.assert_called_once_with('qemu-img', 'info', test_path,
run_as_root=True,
prlimit=image_utils.QEMU_IMG_LIMITS)
self.assertEqual(mock_info.return_value, output)
@mock.patch('cinder.utils.execute')
def test_get_qemu_img_version(self, mock_exec):
mock_out = "qemu-img version 2.0.0"
mock_err = mock.sentinel.err
mock_exec.return_value = (mock_out, mock_err)
expected_version = [2, 0, 0]
version = image_utils.get_qemu_img_version()
mock_exec.assert_called_once_with('qemu-img', '--version',
check_exit_code=False)
self.assertEqual(expected_version, version)
@mock.patch.object(image_utils, 'get_qemu_img_version')
def test_validate_qemu_img_version(self, mock_get_qemu_img_version):
fake_current_version = [1, 8]
mock_get_qemu_img_version.return_value = fake_current_version
minimum_version = '1.8'
image_utils.check_qemu_img_version(minimum_version)
mock_get_qemu_img_version.assert_called_once_with()
@mock.patch.object(image_utils, 'get_qemu_img_version')
def _test_validate_unsupported_qemu_img_version(self,
mock_get_qemu_img_version,
current_version=None):
mock_get_qemu_img_version.return_value = current_version
minimum_version = '2.0'
self.assertRaises(exception.VolumeBackendAPIException,
image_utils.check_qemu_img_version,
minimum_version)
mock_get_qemu_img_version.assert_called_once_with()
def test_validate_qemu_img_version_not_installed(self):
self._test_validate_unsupported_qemu_img_version()
def test_validate_older_qemu_img_version(self):
self._test_validate_unsupported_qemu_img_version(
current_version=[1, 8])
@ddt.ddt
class TestConvertImage(test.TestCase):
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.utils.execute')
@mock.patch('cinder.utils.is_blk_device', return_value=True)
def test_defaults_block_dev_with_size_info(self, mock_isblk,
mock_exec, mock_info):
source = mock.sentinel.source
dest = mock.sentinel.dest
out_format = mock.sentinel.out_format
mock_info.return_value.virtual_size = 1048576
throttle = throttling.Throttle(prefix=['cgcmd'])
with mock.patch('cinder.volume.volume_utils.check_for_odirect_support',
return_value=True):
output = image_utils.convert_image(source, dest, out_format,
throttle=throttle)
self.assertIsNone(output)
mock_exec.assert_called_once_with('cgcmd', 'qemu-img', 'convert',
'-O', out_format, '-t', 'none',
source, dest, run_as_root=True)
mock_exec.reset_mock()
with mock.patch('cinder.volume.volume_utils.check_for_odirect_support',
return_value=False):
output = image_utils.convert_image(source, dest, out_format)
self.assertIsNone(output)
mock_exec.assert_called_once_with('qemu-img', 'convert',
'-O', out_format, source, dest,
run_as_root=True)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.utils.execute')
@mock.patch('cinder.utils.is_blk_device', return_value=True)
def test_defaults_block_dev_without_size_info(self, mock_isblk,
mock_exec,
mock_info):
source = mock.sentinel.source
dest = mock.sentinel.dest
out_format = mock.sentinel.out_format
mock_info.side_effect = ValueError
throttle = throttling.Throttle(prefix=['cgcmd'])
with mock.patch('cinder.volume.volume_utils.check_for_odirect_support',
return_value=True):
output = image_utils.convert_image(source, dest, out_format,
throttle=throttle)
mock_info.assert_called_once_with(source, run_as_root=True)
self.assertIsNone(output)
mock_exec.assert_called_once_with('cgcmd', 'qemu-img', 'convert',
'-O', out_format, '-t', 'none',
source, dest, run_as_root=True)
mock_exec.reset_mock()
with mock.patch('cinder.volume.volume_utils.check_for_odirect_support',
return_value=False):
output = image_utils.convert_image(source, dest, out_format)
self.assertIsNone(output)
mock_exec.assert_called_once_with('qemu-img', 'convert',
'-O', out_format, source, dest,
run_as_root=True)
@mock.patch('cinder.volume.volume_utils.check_for_odirect_support',
return_value=True)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.utils.execute')
@mock.patch('cinder.utils.is_blk_device', return_value=False)
def test_defaults_not_block_dev_with_size_info(self, mock_isblk,
mock_exec,
mock_info,
mock_odirect):
source = mock.sentinel.source
dest = mock.sentinel.dest
out_format = mock.sentinel.out_format
out_subformat = 'fake_subformat'
mock_info.return_value.virtual_size = 1048576
output = image_utils.convert_image(source, dest, out_format,
out_subformat=out_subformat)
self.assertIsNone(output)
mock_exec.assert_called_once_with('qemu-img', 'convert', '-O',
out_format, '-o',
'subformat=%s' % out_subformat,
source, dest,
run_as_root=True)
@mock.patch('cinder.volume.volume_utils.check_for_odirect_support',
return_value=True)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.utils.execute')
@mock.patch('cinder.utils.is_blk_device', return_value=False)
def test_defaults_not_block_dev_without_size_info(self,
mock_isblk,
mock_exec,
mock_info,
mock_odirect):
source = mock.sentinel.source
dest = mock.sentinel.dest
out_format = mock.sentinel.out_format
out_subformat = 'fake_subformat'
mock_info.side_effect = ValueError
output = image_utils.convert_image(source, dest, out_format,
out_subformat=out_subformat)
self.assertIsNone(output)
mock_exec.assert_called_once_with('qemu-img', 'convert', '-O',
out_format, '-o',
'subformat=%s' % out_subformat,
source, dest,
run_as_root=True)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.utils.execute')
@mock.patch('cinder.utils.is_blk_device', return_value=True)
def test_defaults_block_dev_ami_img(self, mock_isblk, mock_exec,
mock_info):
source = mock.sentinel.source
dest = mock.sentinel.dest
out_format = mock.sentinel.out_format
mock_info.return_value.virtual_size = 1048576
with mock.patch('cinder.volume.volume_utils.check_for_odirect_support',
return_value=True):
output = image_utils.convert_image(source, dest, out_format,
src_format='AMI')
self.assertIsNone(output)
mock_exec.assert_called_once_with('qemu-img', 'convert',
'-O', out_format, '-t', 'none',
source, dest, run_as_root=True)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.utils.execute')
@mock.patch('cinder.utils.is_blk_device', return_value=False)
@mock.patch('cinder.volume.volume_utils.check_for_odirect_support')
def test_convert_to_vhd(self, mock_check_odirect, mock_isblk,
mock_exec, mock_info):
source = mock.sentinel.source
dest = mock.sentinel.dest
out_format = "vhd"
mock_info.return_value.virtual_size = 1048576
output = image_utils.convert_image(source, dest, out_format)
self.assertIsNone(output)
# Qemu uses the legacy "vpc" format name, instead of "vhd".
mock_exec.assert_called_once_with('qemu-img', 'convert',
'-O', 'vpc',
source, dest, run_as_root=True)
@ddt.data(True, False)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.utils.execute')
@mock.patch('cinder.utils.is_blk_device', return_value=False)
def test_convert_to_qcow2(self,
compress_option,
mock_isblk, mock_exec, mock_info):
self.override_config('image_compress_on_upload', compress_option)
source = mock.sentinel.source
dest = mock.sentinel.dest
out_format = 'qcow2'
mock_info.return_value.virtual_size = 1048576
image_utils.convert_image(source,
dest,
out_format,
compress=True)
exec_args = ['qemu-img', 'convert', '-O', 'qcow2']
if compress_option:
exec_args.append('-c')
exec_args.extend((source, dest))
mock_exec.assert_called_once_with(*exec_args,
run_as_root=True)
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('cinder.volume.volume_utils.check_for_odirect_support',
return_value=True)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.utils.execute')
@mock.patch('cinder.utils.is_blk_device', return_value=False)
@mock.patch('os.path.dirname', return_value='fakedir')
@mock.patch('os.path.ismount', return_value=True)
@mock.patch('oslo_utils.fileutils.ensure_tree')
@mock.patch('cinder.image.image_utils.utils.tempdir')
@mock.patch.object(image_utils.LOG, 'error')
def test_not_enough_conversion_space(self,
mock_log,
mock_tempdir,
mock_make,
mock_ismount,
mock_dirname,
mock_isblk,
mock_exec,
mock_info,
mock_odirect,
mock_conf):
source = mock.sentinel.source
mock_conf.image_conversion_dir = 'fakedir'
dest = [mock_conf.image_conversion_dir]
out_format = mock.sentinel.out_format
mock_info.side_effect = ValueError
mock_exec.side_effect = processutils.ProcessExecutionError(
stderr='No space left on device')
self.assertRaises(processutils.ProcessExecutionError,
image_utils.convert_image,
source, dest, out_format)
mock_log.assert_called_with('Insufficient free space on fakedir for'
' image conversion.')
class TestResizeImage(test.TestCase):
@mock.patch('cinder.utils.execute')
def test_defaults(self, mock_exec):
source = mock.sentinel.source
size = mock.sentinel.size
output = image_utils.resize_image(source, size)
self.assertIsNone(output)
mock_exec.assert_called_once_with('qemu-img', 'resize', source,
'sentinel.sizeG', run_as_root=False)
@mock.patch('cinder.utils.execute')
def test_run_as_root(self, mock_exec):
source = mock.sentinel.source
size = mock.sentinel.size
output = image_utils.resize_image(source, size, run_as_root=True)
self.assertIsNone(output)
mock_exec.assert_called_once_with('qemu-img', 'resize', source,
'sentinel.sizeG', run_as_root=True)
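# fetch() writes the downloaded image through an eventlet tpool.Proxy
# wrapped around the open file handle, presumably so blocking writes do
# not stall the event loop; the assertions below pin that wiring down.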
class TestFetch(test.TestCase):
@mock.patch('eventlet.tpool.Proxy')
@mock.patch('os.stat')
@mock.patch('cinder.image.image_utils.fileutils')
def test_defaults(self, mock_fileutils, mock_stat, mock_proxy):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_id = mock.sentinel.image_id
path = 'test_path'
_user_id = mock.sentinel._user_id
_project_id = mock.sentinel._project_id
mock_open = mock.mock_open()
mock_stat.return_value.st_size = 1048576
with mock.patch('cinder.image.image_utils.open',
new=mock_open, create=True):
output = image_utils.fetch(ctxt, image_service, image_id, path,
_user_id, _project_id)
self.assertIsNone(output)
mock_proxy.assert_called_once_with(mock_open.return_value)
image_service.download.assert_called_once_with(ctxt, image_id,
mock_proxy.return_value)
mock_open.assert_called_once_with(path, 'wb')
mock_fileutils.remove_path_on_error.assert_called_once_with(path)
(mock_fileutils.remove_path_on_error.return_value.__enter__
.assert_called_once_with())
(mock_fileutils.remove_path_on_error.return_value.__exit__
.assert_called_once_with(None, None, None))
def test_fetch_enospc(self):
context = mock.sentinel.context
image_service = mock.Mock()
image_id = mock.sentinel.image_id
        e = exception.ImageTooBig(image_id=image_id, reason="fake")
e.errno = errno.ENOSPC
image_service.download.side_effect = e
path = '/test_path'
_user_id = mock.sentinel._user_id
_project_id = mock.sentinel._project_id
with mock.patch('cinder.image.image_utils.open',
new=mock.mock_open(), create=True):
self.assertRaises(exception.ImageTooBig,
image_utils.fetch,
context, image_service, image_id, path,
_user_id, _project_id)
def test_fetch_ioerror(self):
context = mock.sentinel.context
image_service = mock.Mock()
image_id = mock.sentinel.image_id
e = IOError()
e.errno = errno.ECONNRESET
e.strerror = 'Some descriptive message'
image_service.download.side_effect = e
path = '/test_path'
_user_id = mock.sentinel._user_id
_project_id = mock.sentinel._project_id
with mock.patch('cinder.image.image_utils.open',
new=mock.mock_open(), create=True):
self.assertRaisesRegex(exception.ImageDownloadFailed,
e.strerror,
image_utils.fetch,
context, image_service, image_id, path,
_user_id, _project_id)
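# Stand-ins for the cursive signature verifier interface (update()/verify()):
# MockVerifier accepts any payload, while BadVerifier raises
# InvalidSignature to exercise the failure path.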
class MockVerifier(object):
def update(self, data):
return
def verify(self):
return True
class BadVerifier(object):
def update(self, data):
return
def verify(self):
raise cryptography.exceptions.InvalidSignature(
'Invalid signature.'
)
class TestVerifyImageSignature(test.TestCase):
@mock.patch('cursive.signature_utils.get_verifier')
@mock.patch('oslo_utils.fileutils.remove_path_on_error')
def test_image_signature_verify_failed(self, mock_remove, mock_get):
self.mock_object(builtins, 'open', mock.mock_open())
ctxt = mock.sentinel.context
metadata = {'name': 'test image',
'is_public': False,
'protected': False,
'properties':
{'img_signature_certificate_uuid': 'fake_uuid',
'img_signature_hash_method': 'SHA-256',
'img_signature': 'signature',
'img_signature_key_type': 'RSA-PSS'}}
class FakeImageService(object):
def show(self, context, image_id):
return metadata
self.flags(verify_glance_signatures='enabled')
mock_get.return_value = BadVerifier()
self.assertRaises(exception.ImageSignatureVerificationException,
image_utils.verify_glance_image_signature,
ctxt, FakeImageService(), 'fake_id',
'fake_path')
mock_get.assert_called_once_with(
context=ctxt,
img_signature_certificate_uuid='fake_uuid',
img_signature_hash_method='SHA-256',
img_signature='signature',
img_signature_key_type='RSA-PSS')
@mock.patch('cursive.signature_utils.get_verifier')
def test_image_signature_metadata_missing(self, mock_get):
ctxt = mock.sentinel.context
metadata = {'name': 'test image',
'is_public': False,
'protected': False,
'properties': {}}
class FakeImageService(object):
def show(self, context, image_id):
return metadata
self.flags(verify_glance_signatures='enabled')
result = image_utils.verify_glance_image_signature(
ctxt, FakeImageService(), 'fake_id', 'fake_path')
self.assertFalse(result)
mock_get.assert_not_called()
@mock.patch('cursive.signature_utils.get_verifier')
def test_image_signature_metadata_incomplete(self, mock_get):
ctxt = mock.sentinel.context
metadata = {'name': 'test image',
'is_public': False,
'protected': False,
'properties':
{'img_signature_certificate_uuid': None,
'img_signature_hash_method': 'SHA-256',
'img_signature': 'signature',
'img_signature_key_type': 'RSA-PSS'}}
class FakeImageService(object):
def show(self, context, image_id):
return metadata
self.flags(verify_glance_signatures='enabled')
self.assertRaises(exception.InvalidSignatureImage,
image_utils.verify_glance_image_signature, ctxt,
FakeImageService(), 'fake_id', 'fake_path')
mock_get.assert_not_called()
@mock.patch('six.moves.builtins.open')
@mock.patch('eventlet.tpool.execute')
@mock.patch('cursive.signature_utils.get_verifier')
@mock.patch('oslo_utils.fileutils.remove_path_on_error')
def test_image_signature_verify_success(self, mock_remove, mock_get,
mock_exec, mock_open):
ctxt = mock.sentinel.context
metadata = {'name': 'test image',
'is_public': False,
'protected': False,
'properties':
{'img_signature_certificate_uuid': 'fake_uuid',
'img_signature_hash_method': 'SHA-256',
'img_signature': 'signature',
'img_signature_key_type': 'RSA-PSS'}}
class FakeImageService(object):
def show(self, context, image_id):
return metadata
self.flags(verify_glance_signatures='enabled')
mock_get.return_value = MockVerifier()
result = image_utils.verify_glance_image_signature(
ctxt, FakeImageService(), 'fake_id', 'fake_path')
self.assertTrue(result)
mock_exec.assert_called_once_with(
image_utils._verify_image,
mock_open.return_value.__enter__.return_value,
mock_get.return_value)
mock_get.assert_called_once_with(
context=ctxt,
img_signature_certificate_uuid='fake_uuid',
img_signature_hash_method='SHA-256',
img_signature='signature',
img_signature_key_type='RSA-PSS')
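# fetch_verify_image() downloads an image and then validates it with
# qemu_img_info: a missing file format, a backing file, or a virtual size
# larger than the requested volume must all raise ImageUnacceptable.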
class TestVerifyImage(test.TestCase):
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.fileutils')
@mock.patch('cinder.image.image_utils.fetch')
def test_defaults(self, mock_fetch, mock_fileutils, mock_info):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
mock_data = mock_info.return_value
mock_data.file_format = 'test_format'
mock_data.backing_file = None
output = image_utils.fetch_verify_image(ctxt, image_service,
image_id, dest)
self.assertIsNone(output)
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
dest, None, None)
mock_info.assert_called_once_with(dest,
run_as_root=True,
force_share=False)
mock_fileutils.remove_path_on_error.assert_called_once_with(dest)
(mock_fileutils.remove_path_on_error.return_value.__enter__
.assert_called_once_with())
(mock_fileutils.remove_path_on_error.return_value.__exit__
.assert_called_once_with(None, None, None))
@mock.patch('cinder.image.image_utils.check_virtual_size')
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.fileutils')
@mock.patch('cinder.image.image_utils.fetch')
def test_kwargs(self, mock_fetch, mock_fileutils, mock_info,
mock_check_space, mock_check_size):
ctxt = mock.sentinel.context
image_service = FakeImageService()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 2
run_as_root = mock.sentinel.run_as_root
mock_data = mock_info.return_value
mock_data.file_format = 'test_format'
mock_data.backing_file = None
mock_data.virtual_size = 1
output = image_utils.fetch_verify_image(
ctxt, image_service, image_id, dest, user_id=user_id,
project_id=project_id, size=size, run_as_root=run_as_root)
self.assertIsNone(output)
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
dest, None, None)
mock_fileutils.remove_path_on_error.assert_called_once_with(dest)
(mock_fileutils.remove_path_on_error.return_value.__enter__
.assert_called_once_with())
(mock_fileutils.remove_path_on_error.return_value.__exit__
.assert_called_once_with(None, None, None))
mock_check_size.assert_called_once_with(mock_data.virtual_size,
size, image_id)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.fileutils')
@mock.patch('cinder.image.image_utils.fetch')
def test_format_error(self, mock_fetch, mock_fileutils, mock_info):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
mock_data = mock_info.return_value
mock_data.file_format = None
mock_data.backing_file = None
self.assertRaises(exception.ImageUnacceptable,
image_utils.fetch_verify_image,
ctxt, image_service, image_id, dest)
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.fileutils')
@mock.patch('cinder.image.image_utils.fetch')
def test_backing_file_error(self, mock_fetch, mock_fileutils, mock_info):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
mock_data = mock_info.return_value
mock_data.file_format = 'test_format'
mock_data.backing_file = 'test_backing_file'
self.assertRaises(exception.ImageUnacceptable,
image_utils.fetch_verify_image,
ctxt, image_service, image_id, dest)
@mock.patch('cinder.image.image_utils.check_virtual_size')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.fileutils')
@mock.patch('cinder.image.image_utils.fetch')
def test_size_error(self, mock_fetch, mock_fileutils, mock_info,
mock_check_size):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
size = 1
mock_data = mock_info.return_value
mock_data.file_format = 'test_format'
mock_data.backing_file = None
mock_data.virtual_size = 2 * units.Gi
mock_check_size.side_effect = exception.ImageUnacceptable(
image_id='fake_image_id', reason='test')
self.assertRaises(exception.ImageUnacceptable,
image_utils.fetch_verify_image,
ctxt, image_service, image_id, dest, size=size)
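# temporary_dir() ensures CONF.image_conversion_dir exists (via
# fileutils.ensure_tree) before handing it to utils.tempdir; with no
# configured directory it falls back to tempdir's default location.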
class TestTemporaryDir(test.TestCase):
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('oslo_utils.fileutils.ensure_tree')
@mock.patch('cinder.image.image_utils.utils.tempdir')
def test_conv_dir_exists(self, mock_tempdir, mock_make,
mock_conf):
mock_conf.image_conversion_dir = mock.sentinel.conv_dir
output = image_utils.temporary_dir()
self.assertTrue(mock_make.called)
mock_tempdir.assert_called_once_with(dir=mock.sentinel.conv_dir)
self.assertEqual(output, mock_tempdir.return_value)
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('oslo_utils.fileutils.ensure_tree')
@mock.patch('cinder.image.image_utils.utils.tempdir')
def test_create_conv_dir(self, mock_tempdir, mock_make,
mock_conf):
mock_conf.image_conversion_dir = mock.sentinel.conv_dir
output = image_utils.temporary_dir()
mock_make.assert_called_once_with(mock.sentinel.conv_dir)
mock_tempdir.assert_called_once_with(dir=mock.sentinel.conv_dir)
self.assertEqual(output, mock_tempdir.return_value)
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('oslo_utils.fileutils.ensure_tree')
@mock.patch('cinder.image.image_utils.utils.tempdir')
def test_no_conv_dir(self, mock_tempdir, mock_make,
mock_conf):
mock_conf.image_conversion_dir = None
output = image_utils.temporary_dir()
self.assertTrue(mock_make.called)
mock_tempdir.assert_called_once_with(dir=None)
self.assertEqual(output, mock_tempdir.return_value)
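# upload_volume() converts the volume to the image's disk_format before
# streaming it to Glance. The ddt pairs below cover the 'ploop' format,
# which qemu-img addresses by its 'parallels' driver name.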
@ddt.ddt
class TestUploadVolume(test.TestCase):
@ddt.data((mock.sentinel.disk_format, mock.sentinel.disk_format, True),
(mock.sentinel.disk_format, mock.sentinel.disk_format, False),
('ploop', 'parallels', True),
('ploop', 'parallels', False))
@mock.patch('eventlet.tpool.Proxy')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('six.moves.builtins.open')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.os')
def test_diff_format(self, image_format, mock_os, mock_temp, mock_convert,
mock_info, mock_open, mock_conf, mock_proxy):
input_format, output_format, do_compress = image_format
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_meta = {'id': 'test_id',
'disk_format': input_format,
'container_format': mock.sentinel.container_format}
volume_path = mock.sentinel.volume_path
mock_os.name = 'posix'
data = mock_info.return_value
data.file_format = output_format
data.backing_file = None
temp_file = mock_temp.return_value.__enter__.return_value
output = image_utils.upload_volume(ctxt, image_service, image_meta,
volume_path, compress=do_compress)
self.assertIsNone(output)
mock_convert.assert_called_once_with(volume_path,
temp_file,
output_format,
run_as_root=True,
compress=do_compress)
mock_info.assert_called_with(temp_file, run_as_root=True)
self.assertEqual(2, mock_info.call_count)
mock_open.assert_called_once_with(temp_file, 'rb')
mock_proxy.assert_called_once_with(
mock_open.return_value.__enter__.return_value)
image_service.update.assert_called_once_with(
ctxt, image_meta['id'], {}, mock_proxy.return_value)
@mock.patch('eventlet.tpool.Proxy')
@mock.patch('cinder.image.image_utils.utils.temporary_chown')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('six.moves.builtins.open')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.os')
def test_same_format(self, mock_os, mock_temp, mock_convert, mock_info,
mock_open, mock_conf, mock_chown, mock_proxy):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_meta = {'id': 'test_id',
'disk_format': 'raw',
'container_format': mock.sentinel.container_format}
volume_path = mock.sentinel.volume_path
mock_os.name = 'posix'
mock_os.access.return_value = False
output = image_utils.upload_volume(ctxt, image_service, image_meta,
volume_path)
self.assertIsNone(output)
self.assertFalse(mock_convert.called)
self.assertFalse(mock_info.called)
mock_chown.assert_called_once_with(volume_path)
mock_open.assert_called_once_with(volume_path, 'rb')
mock_proxy.assert_called_once_with(
mock_open.return_value.__enter__.return_value)
image_service.update.assert_called_once_with(
ctxt, image_meta['id'], {}, mock_proxy.return_value)
@mock.patch('cinder.image.accelerator.ImageAccel._get_engine')
@mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready',
                return_value=True)
@mock.patch('eventlet.tpool.Proxy')
@mock.patch('cinder.image.image_utils.utils.temporary_chown')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('six.moves.builtins.open')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.os')
def test_same_format_compressed(self, mock_os, mock_temp, mock_convert,
mock_info, mock_open, mock_conf,
mock_chown, mock_proxy,
mock_engine_ready, mock_get_engine):
class fakeEngine(object):
def __init__(self):
pass
def compress_img(self, src, dest, run_as_root):
pass
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_meta = {'id': 'test_id',
'disk_format': 'raw',
'container_format': 'compressed'}
mock_conf.allow_compression_on_image_upload = True
volume_path = mock.sentinel.volume_path
mock_os.name = 'posix'
data = mock_info.return_value
data.file_format = 'raw'
data.backing_file = None
temp_file = mock_temp.return_value.__enter__.return_value
mock_engine = mock.Mock(spec=fakeEngine)
mock_get_engine.return_value = mock_engine
output = image_utils.upload_volume(ctxt, image_service, image_meta,
volume_path)
self.assertIsNone(output)
mock_convert.assert_called_once_with(volume_path,
temp_file,
'raw',
compress=True,
run_as_root=True)
mock_info.assert_called_with(temp_file, run_as_root=True)
self.assertEqual(2, mock_info.call_count)
mock_open.assert_called_once_with(temp_file, 'rb')
mock_proxy.assert_called_once_with(
mock_open.return_value.__enter__.return_value)
image_service.update.assert_called_once_with(
ctxt, image_meta['id'], {}, mock_proxy.return_value)
mock_engine.compress_img.assert_called()
@mock.patch('eventlet.tpool.Proxy')
@mock.patch('cinder.image.image_utils.utils.temporary_chown')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('six.moves.builtins.open')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.os')
def test_same_format_on_nt(self, mock_os, mock_temp, mock_convert,
mock_info, mock_open, mock_conf, mock_chown,
mock_proxy):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_meta = {'id': 'test_id',
'disk_format': 'raw',
'container_format': 'bare'}
volume_path = mock.sentinel.volume_path
mock_os.name = 'nt'
mock_os.access.return_value = False
output = image_utils.upload_volume(ctxt, image_service, image_meta,
volume_path)
self.assertIsNone(output)
self.assertFalse(mock_convert.called)
self.assertFalse(mock_info.called)
mock_open.assert_called_once_with(volume_path, 'rb')
mock_proxy.assert_called_once_with(
mock_open.return_value.__enter__.return_value)
image_service.update.assert_called_once_with(
ctxt, image_meta['id'], {}, mock_proxy.return_value)
@mock.patch('cinder.image.accelerator.ImageAccel._get_engine')
@mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready',
                return_value=True)
@mock.patch('eventlet.tpool.Proxy')
@mock.patch('cinder.image.image_utils.utils.temporary_chown')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('six.moves.builtins.open')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.os')
def test_same_format_on_nt_compressed(self, mock_os, mock_temp,
mock_convert, mock_info,
mock_open, mock_conf,
mock_chown, mock_proxy,
mock_engine_ready, mock_get_engine):
class fakeEngine(object):
def __init__(self):
pass
def compress_img(self, src, dest, run_as_root):
pass
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_meta = {'id': 'test_id',
'disk_format': 'raw',
'container_format': 'compressed'}
mock_conf.allow_compression_on_image_upload = True
volume_path = mock.sentinel.volume_path
mock_os.name = 'posix'
data = mock_info.return_value
data.file_format = 'raw'
data.backing_file = None
temp_file = mock_temp.return_value.__enter__.return_value
mock_engine = mock.Mock(spec=fakeEngine)
mock_get_engine.return_value = mock_engine
output = image_utils.upload_volume(ctxt, image_service, image_meta,
volume_path)
self.assertIsNone(output)
mock_convert.assert_called_once_with(volume_path,
temp_file,
'raw',
compress=True,
run_as_root=True)
mock_info.assert_called_with(temp_file, run_as_root=True)
self.assertEqual(2, mock_info.call_count)
mock_open.assert_called_once_with(temp_file, 'rb')
mock_proxy.assert_called_once_with(
mock_open.return_value.__enter__.return_value)
image_service.update.assert_called_once_with(
ctxt, image_meta['id'], {}, mock_proxy.return_value)
mock_engine.compress_img.assert_called()
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('six.moves.builtins.open')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.os')
def test_convert_error(self, mock_os, mock_temp, mock_convert, mock_info,
mock_open, mock_conf):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_meta = {'id': 'test_id',
'disk_format': mock.sentinel.disk_format,
'container_format': mock.sentinel.container_format}
volume_path = mock.sentinel.volume_path
mock_os.name = 'posix'
data = mock_info.return_value
data.file_format = mock.sentinel.other_disk_format
data.backing_file = None
temp_file = mock_temp.return_value.__enter__.return_value
self.assertRaises(exception.ImageUnacceptable,
image_utils.upload_volume,
ctxt, image_service, image_meta, volume_path)
mock_convert.assert_called_once_with(volume_path,
temp_file,
mock.sentinel.disk_format,
run_as_root=True,
compress=True)
mock_info.assert_called_with(temp_file, run_as_root=True)
self.assertEqual(2, mock_info.call_count)
self.assertFalse(image_service.update.called)
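# fetch_to_vhd() is a thin wrapper that forwards to
# fetch_to_volume_format() with qemu's legacy 'vpc' name for VHD.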
class TestFetchToVhd(test.TestCase):
@mock.patch('cinder.image.image_utils.fetch_to_volume_format')
def test_defaults(self, mock_fetch_to):
ctxt = mock.sentinel.context
image_service = mock.sentinel.image_service
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
blocksize = mock.sentinel.blocksize
out_subformat = 'fake_subformat'
output = image_utils.fetch_to_vhd(ctxt, image_service, image_id,
dest, blocksize,
volume_subformat=out_subformat)
self.assertIsNone(output)
mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id,
dest, 'vpc', blocksize,
volume_subformat=out_subformat,
user_id=None,
project_id=None,
run_as_root=True)
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.fetch_to_volume_format')
def test_kwargs(self, mock_fetch_to, mock_check_space):
ctxt = mock.sentinel.context
image_service = mock.sentinel.image_service
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
blocksize = mock.sentinel.blocksize
user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
run_as_root = mock.sentinel.run_as_root
out_subformat = 'fake_subformat'
output = image_utils.fetch_to_vhd(ctxt, image_service, image_id,
dest, blocksize, user_id=user_id,
project_id=project_id,
run_as_root=run_as_root,
volume_subformat=out_subformat)
self.assertIsNone(output)
mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id,
dest, 'vpc', blocksize,
volume_subformat=out_subformat,
user_id=user_id,
project_id=project_id,
run_as_root=run_as_root)
class TestFetchToRaw(test.TestCase):
@mock.patch('cinder.image.image_utils.fetch_to_volume_format')
def test_defaults(self, mock_fetch_to):
ctxt = mock.sentinel.context
image_service = mock.sentinel.image_service
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
blocksize = mock.sentinel.blocksize
output = image_utils.fetch_to_raw(ctxt, image_service, image_id,
dest, blocksize)
self.assertIsNone(output)
mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id,
dest, 'raw', blocksize,
user_id=None, project_id=None,
size=None, run_as_root=True)
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.fetch_to_volume_format')
def test_kwargs(self, mock_fetch_to, mock_check_space):
ctxt = mock.sentinel.context
image_service = mock.sentinel.image_service
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
blocksize = mock.sentinel.blocksize
user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = mock.sentinel.size
run_as_root = mock.sentinel.run_as_root
output = image_utils.fetch_to_raw(ctxt, image_service, image_id,
dest, blocksize, user_id=user_id,
project_id=project_id, size=size,
run_as_root=run_as_root)
self.assertIsNone(output)
mock_fetch_to.assert_called_once_with(ctxt, image_service, image_id,
dest, 'raw', blocksize,
user_id=user_id, size=size,
project_id=project_id,
run_as_root=run_as_root)
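# Minimal image-service stub: show() reports an active 2 GiB image in a
# 'bare' container with a configurable disk_format.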
class FakeImageService(object):
def __init__(self, db_driver=None, image_service=None, disk_format='raw'):
self.temp_images = None
self.disk_format = disk_format
def show(self, context, image_id):
return {'size': 2 * units.Gi,
'disk_format': self.disk_format,
'container_format': 'bare',
'status': 'active'}
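# fetch_to_volume_format() inspects the temporary file twice: once before
# the download (which doubles as a qemu-img availability probe, hence the
# force_share=False call) and once afterwards to validate the fetched
# image before conversion.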
class TestFetchToVolumeFormat(test.TestCase):
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_defaults(self, mock_conf, mock_temp, mock_info, mock_fetch,
mock_is_xen, mock_repl_xen, mock_copy, mock_convert,
mock_check_space):
ctxt = mock.sentinel.context
ctxt.user_id = mock.sentinel.user_id
image_service = FakeImageService()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
out_subformat = None
blocksize = mock.sentinel.blocksize
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = None
data.virtual_size = 1234
tmp = mock_temp.return_value.__enter__.return_value
output = image_utils.fetch_to_volume_format(ctxt, image_service,
image_id, dest,
volume_format, blocksize)
self.assertIsNone(output)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=True),
mock.call(tmp, run_as_root=True)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, None, None)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
mock_convert.assert_called_once_with(tmp, dest, volume_format,
out_subformat=out_subformat,
run_as_root=True,
src_format='raw')
@mock.patch('cinder.image.image_utils.check_virtual_size')
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_kwargs(self, mock_conf, mock_temp, mock_info, mock_fetch,
mock_is_xen, mock_repl_xen, mock_copy, mock_convert,
mock_check_space, mock_check_size):
ctxt = mock.sentinel.context
image_service = FakeImageService()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
out_subformat = None
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = None
data.virtual_size = 1234
tmp = mock_temp.return_value.__enter__.return_value
output = image_utils.fetch_to_volume_format(
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
self.assertIsNone(output)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=run_as_root),
mock.call(tmp, run_as_root=run_as_root)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, user_id, project_id)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
mock_convert.assert_called_once_with(tmp, dest, volume_format,
out_subformat=out_subformat,
run_as_root=run_as_root,
src_format='raw')
mock_check_size.assert_called_once_with(data.virtual_size,
size, image_id)
@mock.patch('cinder.image.image_utils.check_virtual_size')
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=True)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_convert_from_vhd(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_repl_xen,
mock_copy, mock_convert, mock_check_space,
mock_check_size):
ctxt = mock.sentinel.context
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
out_subformat = None
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = None
data.virtual_size = 1234
tmp = mock_temp.return_value.__enter__.return_value
image_service = FakeImageService(disk_format='vhd')
expect_format = 'vpc'
output = image_utils.fetch_to_volume_format(
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
self.assertIsNone(output)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=run_as_root),
mock.call(tmp, run_as_root=run_as_root)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, user_id, project_id)
mock_repl_xen.assert_called_once_with(tmp)
self.assertFalse(mock_copy.called)
mock_convert.assert_called_once_with(tmp, dest, volume_format,
out_subformat=out_subformat,
run_as_root=run_as_root,
src_format=expect_format)
@mock.patch('cinder.image.image_utils.check_virtual_size')
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_convert_from_iso(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_copy,
mock_convert, mock_check_space,
mock_check_size):
ctxt = mock.sentinel.context
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
out_subformat = None
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = None
data.virtual_size = 1234
tmp = mock_temp.return_value.__enter__.return_value
image_service = FakeImageService(disk_format='iso')
expect_format = 'raw'
output = image_utils.fetch_to_volume_format(
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
self.assertIsNone(output)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=run_as_root),
mock.call(tmp, run_as_root=run_as_root)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, user_id, project_id)
self.assertFalse(mock_copy.called)
mock_convert.assert_called_once_with(tmp, dest, volume_format,
out_subformat=out_subformat,
run_as_root=run_as_root,
src_format=expect_format)
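    # TemporaryImages.fetch() keeps the downloaded image around inside the
    # context, so the subsequent fetch_to_volume_format() call appears to
    # reuse it rather than downloading again (fetch is asserted only once).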
@mock.patch('cinder.image.image_utils.check_available_space',
new=mock.Mock())
@mock.patch('cinder.image.image_utils.is_xenserver_format',
new=mock.Mock(return_value=False))
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_temporary_images(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_repl_xen,
mock_copy, mock_convert):
ctxt = mock.sentinel.context
ctxt.user_id = mock.sentinel.user_id
image_service = FakeImageService()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
out_subformat = None
blocksize = mock.sentinel.blocksize
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = None
data.virtual_size = 1234
tmp = mock.sentinel.tmp
dummy = mock.sentinel.dummy
mock_temp.return_value.__enter__.side_effect = [tmp, dummy]
with image_utils.TemporaryImages.fetch(image_service, ctxt,
image_id) as tmp_img:
self.assertEqual(tmp_img, tmp)
output = image_utils.fetch_to_volume_format(ctxt, image_service,
image_id, dest,
volume_format,
blocksize)
self.assertIsNone(output)
self.assertEqual(2, mock_temp.call_count)
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=True),
mock.call(dummy, force_share=False, run_as_root=True),
mock.call(tmp, run_as_root=True)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, None, None)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
mock_convert.assert_called_once_with(tmp, dest, volume_format,
out_subformat=out_subformat,
run_as_root=True,
src_format='raw')
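    # When qemu_img_info fails (no qemu-img on the host), a raw image is
    # streamed directly with volume_utils.copy_volume, sized in MiB; any
    # other (or missing) disk_format is rejected as ImageUnacceptable.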
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info',
side_effect=processutils.ProcessExecutionError)
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_no_qemu_img_and_is_raw(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_repl_xen,
mock_copy, mock_convert):
ctxt = mock.sentinel.context
image_service = mock.Mock(temp_images=None)
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
tmp = mock_temp.return_value.__enter__.return_value
image_service.show.return_value = {'disk_format': 'raw',
'size': 41126400}
image_size_m = math.ceil(float(41126400) / units.Mi)
output = image_utils.fetch_to_volume_format(
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
self.assertIsNone(output)
image_service.show.assert_called_once_with(ctxt, image_id)
mock_temp.assert_called_once_with()
mock_info.assert_called_once_with(tmp,
force_share=False,
run_as_root=run_as_root)
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, user_id, project_id)
self.assertFalse(mock_repl_xen.called)
mock_copy.assert_called_once_with(tmp, dest, image_size_m,
blocksize)
self.assertFalse(mock_convert.called)
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info',
side_effect=processutils.ProcessExecutionError)
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_no_qemu_img_not_raw(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_repl_xen,
mock_copy, mock_convert):
ctxt = mock.sentinel.context
image_service = mock.Mock()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
blocksize = mock.sentinel.blocksize
user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
tmp = mock_temp.return_value.__enter__.return_value
image_service.show.return_value = {'disk_format': 'not_raw'}
self.assertRaises(
exception.ImageUnacceptable,
image_utils.fetch_to_volume_format,
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
image_service.show.assert_called_once_with(ctxt, image_id)
mock_temp.assert_called_once_with()
mock_info.assert_called_once_with(tmp,
force_share=False,
run_as_root=run_as_root)
self.assertFalse(mock_fetch.called)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
self.assertFalse(mock_convert.called)
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info',
side_effect=processutils.ProcessExecutionError)
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_no_qemu_img_no_metadata(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_repl_xen,
mock_copy, mock_convert):
ctxt = mock.sentinel.context
image_service = mock.Mock(temp_images=None)
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
tmp = mock_temp.return_value.__enter__.return_value
image_service.show.return_value = None
self.assertRaises(
exception.ImageUnacceptable,
image_utils.fetch_to_volume_format,
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
image_service.show.assert_called_once_with(ctxt, image_id)
mock_temp.assert_called_once_with()
mock_info.assert_called_once_with(tmp,
force_share=False,
run_as_root=run_as_root)
self.assertFalse(mock_fetch.called)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
self.assertFalse(mock_convert.called)
@mock.patch('cinder.image.image_utils.check_virtual_size')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_size_error(self, mock_conf, mock_temp, mock_info, mock_fetch,
mock_is_xen, mock_repl_xen, mock_copy, mock_convert,
mock_check_size):
ctxt = mock.sentinel.context
image_service = mock.Mock(temp_images=None)
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 1234
run_as_root = mock.sentinel.run_as_root
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = None
data.virtual_size = int(1234.5 * units.Gi)
tmp = mock_temp.return_value.__enter__.return_value
mock_check_size.side_effect = exception.ImageUnacceptable(
image_id='fake_image_id', reason='test')
self.assertRaises(
exception.ImageUnacceptable,
image_utils.fetch_to_volume_format,
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
image_service.show.assert_called_once_with(ctxt, image_id)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=run_as_root),
mock.call(tmp, run_as_root=run_as_root)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, user_id, project_id)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
self.assertFalse(mock_convert.called)
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_qemu_img_parse_error(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_repl_xen,
mock_copy, mock_convert):
ctxt = mock.sentinel.context
image_service = mock.Mock(temp_images=None)
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
data = mock_info.return_value
data.file_format = None
data.backing_file = None
data.virtual_size = 1234
tmp = mock_temp.return_value.__enter__.return_value
self.assertRaises(
exception.ImageUnacceptable,
image_utils.fetch_to_volume_format,
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
image_service.show.assert_called_once_with(ctxt, image_id)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=run_as_root),
mock.call(tmp, run_as_root=run_as_root)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, user_id, project_id)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
self.assertFalse(mock_convert.called)
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_backing_file_error(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_repl_xen,
mock_copy, mock_convert):
ctxt = mock.sentinel.context
image_service = mock.Mock(temp_images=None)
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = mock.sentinel.backing_file
data.virtual_size = 1234
tmp = mock_temp.return_value.__enter__.return_value
self.assertRaises(
exception.ImageUnacceptable,
image_utils.fetch_to_volume_format,
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
image_service.show.assert_called_once_with(ctxt, image_id)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=run_as_root),
mock.call(tmp, run_as_root=run_as_root)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, user_id, project_id)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
self.assertFalse(mock_convert.called)
@mock.patch('cinder.image.image_utils.check_virtual_size')
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=True)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_xenserver_to_vhd(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_repl_xen,
mock_copy, mock_convert, mock_check_space,
mock_check_size):
ctxt = mock.sentinel.context
image_service = FakeImageService()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
blocksize = mock.sentinel.blocksize
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = None
data.virtual_size = 1234
tmp = mock_temp.return_value.__enter__.return_value
output = image_utils.fetch_to_volume_format(
ctxt, image_service, image_id, dest, volume_format, blocksize,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
self.assertIsNone(output)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=run_as_root),
mock.call(tmp, run_as_root=run_as_root)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, user_id, project_id)
mock_repl_xen.assert_called_once_with(tmp)
self.assertFalse(mock_copy.called)
mock_convert.assert_called_once_with(tmp, dest, volume_format,
out_subformat=None,
run_as_root=run_as_root,
src_format='raw')
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info',
side_effect=processutils.ProcessExecutionError)
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_no_qemu_img_fetch_verify_image(self, mock_conf,
mock_temp, mock_info,
mock_fetch):
ctxt = mock.sentinel.context
image_service = mock.Mock(temp_images=None)
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
ctxt.user_id = user_id = mock.sentinel.user_id
project_id = mock.sentinel.project_id
size = 4321
run_as_root = mock.sentinel.run_as_root
image_service.show.return_value = {'disk_format': 'raw',
'size': 41126400}
image_utils.fetch_verify_image(
ctxt, image_service, image_id, dest,
user_id=user_id, project_id=project_id, size=size,
run_as_root=run_as_root)
image_service.show.assert_called_once_with(ctxt, image_id)
mock_info.assert_called_once_with(dest,
force_share=False,
run_as_root=run_as_root)
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
dest, None, None)
@mock.patch('cinder.image.image_utils.qemu_img_info',
side_effect=processutils.ProcessExecutionError)
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_get_qemu_data_returns_none(self, mock_conf, mock_temp, mock_info):
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
run_as_root = mock.sentinel.run_as_root
disk_format_raw = True
has_meta = True
output = image_utils.get_qemu_data(image_id, has_meta,
disk_format_raw, dest,
run_as_root=run_as_root)
self.assertIsNone(output)
@mock.patch('cinder.image.image_utils.qemu_img_info',
side_effect=processutils.ProcessExecutionError)
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_get_qemu_data_with_image_meta_exception(self, mock_conf,
mock_temp, mock_info):
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
run_as_root = mock.sentinel.run_as_root
disk_format_raw = False
has_meta = True
self.assertRaises(
exception.ImageUnacceptable,
image_utils.get_qemu_data, image_id, has_meta, disk_format_raw,
dest, run_as_root=run_as_root)
@mock.patch('cinder.image.image_utils.qemu_img_info',
side_effect=processutils.ProcessExecutionError)
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_get_qemu_data_without_image_meta_except(self, mock_conf,
mock_temp, mock_info):
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
run_as_root = mock.sentinel.run_as_root
disk_format_raw = False
has_meta = False
self.assertRaises(
exception.ImageUnacceptable,
image_utils.get_qemu_data, image_id, has_meta, disk_format_raw,
dest, run_as_root=run_as_root)
@mock.patch('cinder.image.accelerator.is_gzip_compressed',
                return_value=True)
@mock.patch('cinder.image.accelerator.ImageAccel._get_engine')
@mock.patch('cinder.image.accelerator.ImageAccel.is_engine_ready',
                return_value=True)
@mock.patch('cinder.image.image_utils.check_available_space')
@mock.patch('cinder.image.image_utils.convert_image')
@mock.patch('cinder.image.image_utils.volume_utils.copy_volume')
@mock.patch(
'cinder.image.image_utils.replace_xenserver_image_with_coalesced_vhd')
@mock.patch('cinder.image.image_utils.is_xenserver_format',
return_value=False)
@mock.patch('cinder.image.image_utils.fetch')
@mock.patch('cinder.image.image_utils.qemu_img_info')
@mock.patch('cinder.image.image_utils.temporary_file')
@mock.patch('cinder.image.image_utils.CONF')
def test_defaults_compressed(self, mock_conf, mock_temp, mock_info,
mock_fetch, mock_is_xen, mock_repl_xen,
mock_copy, mock_convert, mock_check_space,
mock_engine_ready, mock_get_engine,
mock_gzip_compressed):
class fakeEngine(object):
def __init__(self):
pass
def decompress_img(self, src, dest, run_as_root):
pass
class FakeImageService(object):
def __init__(self, db_driver=None,
image_service=None, disk_format='raw'):
self.temp_images = None
self.disk_format = disk_format
def show(self, context, image_id):
return {'size': 2 * units.Gi,
'disk_format': self.disk_format,
'container_format': 'compressed',
'status': 'active'}
ctxt = mock.sentinel.context
ctxt.user_id = mock.sentinel.user_id
image_service = FakeImageService()
image_id = mock.sentinel.image_id
dest = mock.sentinel.dest
volume_format = mock.sentinel.volume_format
out_subformat = None
blocksize = mock.sentinel.blocksize
data = mock_info.return_value
data.file_format = volume_format
data.backing_file = None
data.virtual_size = 1234
tmp = mock_temp.return_value.__enter__.return_value
mock_engine = mock.Mock(spec=fakeEngine)
mock_get_engine.return_value = mock_engine
output = image_utils.fetch_to_volume_format(ctxt, image_service,
image_id, dest,
volume_format, blocksize)
self.assertIsNone(output)
mock_temp.assert_called_once_with()
mock_info.assert_has_calls([
mock.call(tmp, force_share=False, run_as_root=True),
mock.call(tmp, run_as_root=True)])
mock_fetch.assert_called_once_with(ctxt, image_service, image_id,
tmp, None, None)
self.assertFalse(mock_repl_xen.called)
self.assertFalse(mock_copy.called)
mock_convert.assert_called_once_with(tmp, dest, volume_format,
out_subformat=out_subformat,
run_as_root=True,
src_format='raw')
mock_engine.decompress_img.assert_called()
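# is_xenserver_format() keys off the vhd disk format inside an ovf
# container, the layout XenServer images use.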
class TestXenserverUtils(test.TestCase):
def test_is_xenserver_format(self):
image_meta1 = {'disk_format': 'vhd', 'container_format': 'ovf'}
self.assertTrue(image_utils.is_xenserver_format(image_meta1))
image_meta2 = {'disk_format': 'test_disk_format',
'container_format': 'test_cont_format'}
self.assertFalse(image_utils.is_xenserver_format(image_meta2))
@mock.patch('cinder.image.image_utils.utils.execute')
def test_extract_targz(self, mock_exec):
name = mock.sentinel.archive_name
target = mock.sentinel.target
output = image_utils.extract_targz(name, target)
mock_exec.assert_called_once_with('tar', '-xzf', name, '-C', target)
self.assertIsNone(output)
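# These helpers wrap the XenServer 'vhd-util' CLI for manipulating VHD
# chains (reparenting, resizing, coalescing).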
class TestVhdUtils(test.TestCase):
@mock.patch('cinder.image.image_utils.utils.execute')
def test_set_vhd_parent(self, mock_exec):
vhd_path = mock.sentinel.vhd_path
parentpath = mock.sentinel.parentpath
output = image_utils.set_vhd_parent(vhd_path, parentpath)
mock_exec.assert_called_once_with('vhd-util', 'modify', '-n', vhd_path,
'-p', parentpath)
self.assertIsNone(output)
@mock.patch('cinder.image.image_utils.set_vhd_parent')
def test_fix_vhd_chain(self, mock_set_parent):
vhd_chain = (mock.sentinel.first,
mock.sentinel.second,
mock.sentinel.third,
mock.sentinel.fourth,
mock.sentinel.fifth)
output = image_utils.fix_vhd_chain(vhd_chain)
self.assertIsNone(output)
mock_set_parent.assert_has_calls([
mock.call(mock.sentinel.first, mock.sentinel.second),
mock.call(mock.sentinel.second, mock.sentinel.third),
mock.call(mock.sentinel.third, mock.sentinel.fourth),
mock.call(mock.sentinel.fourth, mock.sentinel.fifth)])
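    # The mocked execute() returns a float on stdout; the assertion below
    # shows get_vhd_size() truncates it to an int.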
@mock.patch('cinder.image.image_utils.utils.execute',
return_value=(98765.43210, mock.sentinel.error))
def test_get_vhd_size(self, mock_exec):
vhd_path = mock.sentinel.vhd_path
output = image_utils.get_vhd_size(vhd_path)
mock_exec.assert_called_once_with('vhd-util', 'query', '-n', vhd_path,
'-v')
self.assertEqual(98765, output)
@mock.patch('cinder.image.image_utils.utils.execute')
def test_resize_vhd(self, mock_exec):
vhd_path = mock.sentinel.vhd_path
size = 387549349
journal = mock.sentinel.journal
output = image_utils.resize_vhd(vhd_path, size, journal)
self.assertIsNone(output)
mock_exec.assert_called_once_with('vhd-util', 'resize', '-n', vhd_path,
'-s', str(size), '-j', journal)
@mock.patch('cinder.image.image_utils.utils.execute')
def test_coalesce_vhd(self, mock_exec):
vhd_path = mock.sentinel.vhd_path
output = image_utils.coalesce_vhd(vhd_path)
self.assertIsNone(output)
mock_exec.assert_called_once_with('vhd-util', 'coalesce', '-n',
vhd_path)
@mock.patch('cinder.image.image_utils.temporary_dir')
@mock.patch('cinder.image.image_utils.coalesce_vhd')
@mock.patch('cinder.image.image_utils.resize_vhd')
@mock.patch('cinder.image.image_utils.get_vhd_size')
@mock.patch('cinder.image.image_utils.utils.execute')
def test_coalesce_chain(self, mock_exec, mock_size, mock_resize,
mock_coal, mock_temp):
vhd_chain = (mock.sentinel.first,
mock.sentinel.second,
mock.sentinel.third,
mock.sentinel.fourth,
mock.sentinel.fifth)
# os.path.join does not work with MagicMock objects on Windows.
mock_temp.return_value.__enter__.return_value = 'fake_temp_dir'
output = image_utils.coalesce_chain(vhd_chain)
self.assertEqual(mock.sentinel.fifth, output)
mock_size.assert_has_calls([
mock.call(mock.sentinel.first),
mock.call(mock.sentinel.second),
mock.call(mock.sentinel.third),
mock.call(mock.sentinel.fourth)])
mock_resize.assert_has_calls([
mock.call(mock.sentinel.second, mock_size.return_value, mock.ANY),
mock.call(mock.sentinel.third, mock_size.return_value, mock.ANY),
mock.call(mock.sentinel.fourth, mock_size.return_value, mock.ANY),
mock.call(mock.sentinel.fifth, mock_size.return_value, mock.ANY)])
mock_coal.assert_has_calls([
mock.call(mock.sentinel.first),
mock.call(mock.sentinel.second),
mock.call(mock.sentinel.third),
mock.call(mock.sentinel.fourth)])
@mock.patch('cinder.image.image_utils.os.path')
def test_discover_vhd_chain(self, mock_path):
directory = '/some/test/directory'
mock_path.join.side_effect = lambda x, y: '/'.join((x, y))
mock_path.exists.side_effect = (True, True, True, False)
output = image_utils.discover_vhd_chain(directory)
expected_output = ['/some/test/directory/0.vhd',
'/some/test/directory/1.vhd',
'/some/test/directory/2.vhd']
self.assertEqual(expected_output, output)
@mock.patch('cinder.image.image_utils.temporary_dir')
@mock.patch('cinder.image.image_utils.os.rename')
@mock.patch('cinder.image.image_utils.fileutils.delete_if_exists')
@mock.patch('cinder.image.image_utils.coalesce_chain')
@mock.patch('cinder.image.image_utils.fix_vhd_chain')
@mock.patch('cinder.image.image_utils.discover_vhd_chain')
@mock.patch('cinder.image.image_utils.extract_targz')
def test_replace_xenserver_image_with_coalesced_vhd(
self, mock_targz, mock_discover, mock_fix, mock_coal, mock_delete,
mock_rename, mock_temp):
image_file = mock.sentinel.image_file
tmp = mock_temp.return_value.__enter__.return_value
output = image_utils.replace_xenserver_image_with_coalesced_vhd(
image_file)
self.assertIsNone(output)
mock_targz.assert_called_once_with(image_file, tmp)
mock_discover.assert_called_once_with(tmp)
mock_fix.assert_called_once_with(mock_discover.return_value)
mock_coal.assert_called_once_with(mock_discover.return_value)
mock_delete.assert_called_once_with(image_file)
mock_rename.assert_called_once_with(mock_coal.return_value, image_file)
class TestCreateTemporaryFile(test.TestCase):
@mock.patch('cinder.image.image_utils.os.close')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('cinder.image.image_utils.os.makedirs')
@mock.patch('cinder.image.image_utils.tempfile.mkstemp')
def test_create_temporary_file_no_dir(self, mock_mkstemp, mock_dirs,
mock_conf, mock_close):
mock_conf.image_conversion_dir = None
fd = mock.sentinel.file_descriptor
path = mock.sentinel.absolute_pathname
mock_mkstemp.return_value = (fd, path)
output = image_utils.create_temporary_file()
self.assertEqual(path, output)
mock_mkstemp.assert_called_once_with(dir=None)
mock_close.assert_called_once_with(fd)
@mock.patch('cinder.image.image_utils.os.close')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('cinder.image.image_utils.os.makedirs')
@mock.patch('cinder.image.image_utils.tempfile.mkstemp')
def test_create_temporary_file_with_dir(self, mock_mkstemp, mock_dirs,
mock_conf, mock_close):
conv_dir = mock.sentinel.image_conversion_dir
mock_conf.image_conversion_dir = conv_dir
fd = mock.sentinel.file_descriptor
path = mock.sentinel.absolute_pathname
mock_mkstemp.return_value = (fd, path)
output = image_utils.create_temporary_file()
self.assertEqual(path, output)
self.assertTrue(mock_dirs.called)
mock_mkstemp.assert_called_once_with(dir=conv_dir)
mock_close.assert_called_once_with(fd)
@mock.patch('cinder.image.image_utils.os.close')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('cinder.image.image_utils.fileutils.ensure_tree')
@mock.patch('cinder.image.image_utils.tempfile.mkstemp')
def test_create_temporary_file_and_dir(self, mock_mkstemp, mock_dirs,
mock_conf, mock_close):
conv_dir = mock.sentinel.image_conversion_dir
mock_conf.image_conversion_dir = conv_dir
fd = mock.sentinel.file_descriptor
path = mock.sentinel.absolute_pathname
mock_mkstemp.return_value = (fd, path)
output = image_utils.create_temporary_file()
self.assertEqual(path, output)
mock_dirs.assert_called_once_with(conv_dir)
mock_mkstemp.assert_called_once_with(dir=conv_dir)
mock_close.assert_called_once_with(fd)
@mock.patch('cinder.image.image_utils.os.remove')
@mock.patch('cinder.image.image_utils.os.path.join')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('cinder.image.image_utils.os.listdir')
@mock.patch('cinder.image.image_utils.os.path.exists', return_value=True)
def test_cleanup_temporary_file(self, mock_path, mock_listdir, mock_conf,
mock_join, mock_remove):
mock_listdir.return_value = ['tmphost@backend1', 'tmphost@backend2']
conv_dir = mock.sentinel.image_conversion_dir
mock_conf.image_conversion_dir = conv_dir
mock_join.return_value = '/test/tmp/tmphost@backend1'
image_utils.cleanup_temporary_file('host@backend1')
mock_listdir.assert_called_once_with(conv_dir)
mock_remove.assert_called_once_with('/test/tmp/tmphost@backend1')
@mock.patch('cinder.image.image_utils.os.remove')
@mock.patch('cinder.image.image_utils.os.listdir')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('cinder.image.image_utils.os.path.exists', return_value=False)
def test_cleanup_temporary_file_with_not_exist_path(self, mock_path,
mock_conf,
mock_listdir,
mock_remove):
conv_dir = mock.sentinel.image_conversion_dir
mock_conf.image_conversion_dir = conv_dir
image_utils.cleanup_temporary_file('host@backend1')
self.assertFalse(mock_listdir.called)
self.assertFalse(mock_remove.called)
@mock.patch('cinder.image.image_utils.os.remove')
@mock.patch('cinder.image.image_utils.os.path.join')
@mock.patch('cinder.image.image_utils.CONF')
@mock.patch('cinder.image.image_utils.os.listdir')
@mock.patch('cinder.image.image_utils.os.path.exists', return_value=True)
def test_cleanup_temporary_file_with_exception(self, mock_path,
mock_listdir, mock_conf,
mock_join, mock_remove):
mock_listdir.return_value = ['tmphost@backend1', 'tmphost@backend2']
conv_dir = mock.sentinel.image_conversion_dir
mock_conf.image_conversion_dir = conv_dir
mock_join.return_value = '/test/tmp/tmphost@backend1'
mock_remove.side_effect = OSError
image_utils.cleanup_temporary_file('host@backend1')
mock_listdir.assert_called_once_with(conv_dir)
mock_remove.assert_called_once_with('/test/tmp/tmphost@backend1')
class TestTemporaryFileContextManager(test.TestCase):
@mock.patch('cinder.image.image_utils.create_temporary_file',
return_value=mock.sentinel.temporary_file)
@mock.patch('cinder.image.image_utils.fileutils.delete_if_exists')
def test_temporary_file(self, mock_delete, mock_create):
with image_utils.temporary_file() as tmp_file:
self.assertEqual(mock.sentinel.temporary_file, tmp_file)
self.assertFalse(mock_delete.called)
mock_delete.assert_called_once_with(mock.sentinel.temporary_file)
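# A minimal usage sketch (not part of the original tests), assuming only the
# semantics exercised above: temporary_file() yields the path returned by
# create_temporary_file() and deletes it on exit via fileutils.delete_if_exists.
#
#     from cinder.image import image_utils
#
#     with image_utils.temporary_file() as tmp:
#         do_conversion(tmp)  # hypothetical helper operating on the temp path
#     # the file has been removed once the context manager exits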
class TestImageUtils(test.TestCase):
def test_get_virtual_size(self):
image_id = fake.IMAGE_ID
virtual_size = 1073741824
volume_size = 2
virt_size = image_utils.check_virtual_size(virtual_size,
volume_size,
image_id)
self.assertEqual(1, virt_size)
def test_get_bigger_virtual_size(self):
image_id = fake.IMAGE_ID
virtual_size = 3221225472
volume_size = 2
self.assertRaises(exception.ImageUnacceptable,
image_utils.check_virtual_size,
virtual_size,
volume_size,
image_id)
def test_decode_cipher(self):
expected = {'cipher_alg': 'aes-256',
'cipher_mode': 'xts',
'ivgen_alg': 'essiv'}
result = image_utils.decode_cipher('aes-xts-essiv', 256)
self.assertEqual(expected, result)
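# For reference, an implementation consistent with the expectation above could
# look like the sketch below (an assumption for illustration, not necessarily
# cinder's actual decode_cipher):
#
#     def decode_cipher(cipher_spec, key_size):
#         alg, mode, ivgen = cipher_spec.split('-')
#         return {'cipher_alg': '%s-%s' % (alg, key_size),
#                 'cipher_mode': mode,
#                 'ivgen_alg': ivgen}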
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
import torch
import torch.nn as nn
import torch.nn.functional as F
class MyAwesomeModel(nn.Module):
def __init__(self, n_classes):
super(MyAwesomeModel, self).__init__()
self.feature_extractor = nn.Sequential(
nn.Conv2d(in_channels=1, out_channels=6, kernel_size=4, stride=1),
nn.Tanh(),
nn.AvgPool2d(kernel_size=2),
nn.Conv2d(in_channels=6, out_channels=16, kernel_size=4, stride=1),
nn.Tanh(),
nn.AvgPool2d(kernel_size=2),
nn.Conv2d(in_channels=16, out_channels=120, kernel_size=4, stride=1),
nn.Tanh(),
)
self.classifier = nn.Sequential(
nn.Linear(in_features=120, out_features=84),
nn.Tanh(),
nn.Linear(in_features=84, out_features=n_classes),
)
def forward(self, x, return_features=False):
x = self.feature_extractor(x)
x = torch.flatten(x, 1)
logits = self.classifier(x)
probs = F.log_softmax(logits, dim=1)
if return_features:
return x
else:
return probs
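# Quick smoke test (added for illustration). The shapes assume LeNet-style
# 1x28x28 inputs, for which the three conv/pool stages reduce each image to a
# 120-dimensional feature vector.
if __name__ == "__main__":
    model = MyAwesomeModel(n_classes=10)
    dummy = torch.randn(8, 1, 28, 28)  # batch of 8 grayscale 28x28 images
    log_probs = model(dummy)                       # (8, 10) log-probabilities
    features = model(dummy, return_features=True)  # (8, 120) flattened features
    print(log_probs.shape, features.shape)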
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
from adventofcode.year_2021.day_2021_01 import readable, short
def test_readable_part_one():
answer = readable.part1()
assert answer == 1616
def test_readable_part_two():
answer = readable.part2()
assert answer == 1645
def test_short_part_one():
answer = short.part1()
assert answer == 1616
def test_short_part_two():
answer = short.part2()
assert answer == 1645
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
"""djangoecommerce URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from core.views import index
urlpatterns = [
path('', index),
path('admin/', admin.site.urls),
]
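# Following the URLconf recipe in the module docstring, another app's routes
# could be mounted like this (hypothetical 'blog' app, shown for illustration):
#
#     from django.urls import include
#     urlpatterns += [path('blog/', include('blog.urls'))]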
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
from stable_baselines.ppo2.run_mujoco import eval_return
import cma
import numpy as np
from stable_baselines.low_dim_analysis.eval_util import *
from stable_baselines.low_dim_analysis.common import do_pca, plot_2d, \
dump_rows_write_csv, generate_run_dir, do_proj_on_first_n_IPCA, get_allinone_concat_df
from sklearn.decomposition import IncrementalPCA
from stable_baselines import logger
import pandas as pd
from sklearn.decomposition import PCA
from joblib import Parallel, delayed
from matplotlib import pyplot as plt
import time
import os
from stable_baselines.common.cmd_util import mujoco_arg_parser
from stable_baselines.low_dim_analysis.common_parser import get_common_parser
from numpy import linalg as LA
def plot_cma_returns(plot_dir_alg, name, mean_rets, min_rets, max_rets, show):
X = np.arange(len(mean_rets))
fig, ax = plt.subplots()
plt.xlabel('num of eval')
plt.ylabel('mean returns with min and max filled')
ax.plot(X, mean_rets)
ax.fill_between(X, min_rets, max_rets, alpha=0.5)
file_path = f"{plot_dir_alg}/{name}.pdf"
if os.path.isfile(file_path):
os.remove(file_path)
logger.log(f"saving cma plot to {file_path}")
fig.savefig(file_path, dpi=300,
bbox_inches='tight', format='pdf')
if show: plt.show()
def do_cma(cma_args, first_n_pcs, origin_param, save_dir, starting_coord, var):
tic = time.time()
#TODO better starting locations, record how many samples,
logger.log(f"CMAES STARTING :{starting_coord}")
es = cma.CMAEvolutionStrategy(starting_coord, var)
total_num_of_evals = 0
total_num_timesteps = 0
mean_rets = []
min_rets = []
max_rets = []
eval_returns = None
optimization_path = []
while total_num_timesteps < cma_args.cma_num_timesteps and not es.stop():
solutions = es.ask()
optimization_path.extend(solutions)
        thetas = [np.matmul(coord, first_n_pcs) + origin_param for coord in solutions]
logger.log(f"current time steps num: {total_num_timesteps} total time steps: {cma_args.cma_num_timesteps}")
eval_returns = Parallel(n_jobs=cma_args.cores_to_use) \
(delayed(eval_return)(cma_args, save_dir, theta, cma_args.eval_num_timesteps, i) for
(i, theta) in enumerate(thetas))
mean_rets.append(np.mean(eval_returns))
min_rets.append(np.min(eval_returns))
max_rets.append(np.max(eval_returns))
total_num_of_evals += len(eval_returns)
total_num_timesteps += cma_args.eval_num_timesteps * len(eval_returns)
logger.log(f"current eval returns: {str(eval_returns)}")
logger.log(f"total timesteps so far: {total_num_timesteps}")
negative_eval_returns = [-r for r in eval_returns]
es.tell(solutions, negative_eval_returns)
es.logger.add() # write data to disc to be plotted
es.disp()
toc = time.time()
logger.log(f"####################################CMA took {toc-tic} seconds")
es_logger = es.logger
if not hasattr(es_logger, 'xmean'):
es_logger.load()
n_comp_used = first_n_pcs.shape[0]
optimization_path_mean = np.vstack((starting_coord, es_logger.xmean[:,5:5+n_comp_used]))
return mean_rets, min_rets, max_rets, np.array(optimization_path), np.array(optimization_path_mean)
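# A sketch of how do_cma is intended to be called (argument values here mirror
# the projection helpers used in main() below and are assumptions):
#
#     mean_rets, min_rets, max_rets, opt_path, opt_path_mean = do_cma(
#         cma_args, first_2_pcs, origin_param, save_dir,
#         starting_coord=last_proj_coord, var=5)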
def main():
import sys
logger.log(sys.argv)
common_arg_parser = get_common_parser()
cma_args, cma_unknown_args = common_arg_parser.parse_known_args()
origin = "mean_param"
this_run_dir = get_dir_path_for_this_run(cma_args)
traj_params_dir_name = get_full_params_dir(this_run_dir)
intermediate_data_dir = get_intermediate_data_dir(this_run_dir)
save_dir = get_save_dir( this_run_dir)
if not os.path.exists(intermediate_data_dir):
os.makedirs(intermediate_data_dir)
cma_run_num, cma_intermediate_data_dir = generate_run_dir(get_cma_returns_dirname, intermediate_dir=intermediate_data_dir, n_comp=cma_args.n_comp_to_use)
'''
==========================================================================================
get the pc vectors
==========================================================================================
'''
logger.log("grab final params")
final_file = get_full_param_traj_file_path(traj_params_dir_name, "final")
final_param = pd.read_csv(final_file, header=None).values[0]
final_pca = IncrementalPCA(n_components=2) # for sparse PCA to speed up
theta_file = get_full_param_traj_file_path(traj_params_dir_name, 0)
concat_df = pd.read_csv(theta_file, header=None, chunksize=10000)
tic = time.time()
for chunk in concat_df:
logger.log(f"currnet at : {concat_df._currow}")
if chunk.shape[0] < 2:
logger.log(f"last column too few: {chunk.shape[0]}")
continue
final_pca.partial_fit(chunk.values)
toc = time.time()
logger.log('\nElapsed time computing the chunked PCA {:.2f} s\n'
.format(toc - tic))
logger.log(final_pca.explained_variance_ratio_)
pcs_components = final_pca.components_
first_2_pcs = pcs_components[:2]
mean_param = final_pca.mean_
origin_param = mean_param
theta_file = get_full_param_traj_file_path(traj_params_dir_name, 0)
concat_df = pd.read_csv(theta_file, header=None, chunksize=10000)
proj_coords = do_proj_on_first_n_IPCA(concat_df, first_2_pcs, origin_param)
'''
==========================================================================================
eval all xy coords
==========================================================================================
'''
from stable_baselines.low_dim_analysis.common import plot_contour_trajectory, gen_subspace_coords,do_eval_returns, \
get_allinone_concat_df, do_proj_on_first_n
from stable_baselines.ppo2.run_mujoco import eval_return
last_proj_coord = do_proj_on_first_n(final_param, first_2_pcs, origin_param)
starting_coord = last_proj_coord
tic = time.time()
#TODO better starting locations, record how many samples,
logger.log(f"CMAES STARTING :{starting_coord}")
es = cma.CMAEvolutionStrategy(starting_coord, 5)
total_num_of_evals = 0
total_num_timesteps = 0
mean_rets = []
min_rets = []
max_rets = []
eval_returns = None
optimization_path = []
while total_num_timesteps < cma_args.cma_num_timesteps and not es.stop():
solutions = es.ask()
optimization_path.extend(solutions)
thetas = [np.matmul(coord, first_2_pcs) + origin_param for coord in solutions]
logger.log(f"current time steps num: {total_num_timesteps} total time steps: {cma_args.cma_num_timesteps}")
eval_returns = Parallel(n_jobs=cma_args.cores_to_use) \
(delayed(eval_return)(cma_args, save_dir, theta, cma_args.eval_num_timesteps, i) for
(i, theta) in enumerate(thetas))
mean_rets.append(np.mean(eval_returns))
min_rets.append(np.min(eval_returns))
max_rets.append(np.max(eval_returns))
total_num_of_evals += len(eval_returns)
total_num_timesteps += cma_args.eval_num_timesteps * len(eval_returns)
logger.log(f"current eval returns: {str(eval_returns)}")
logger.log(f"total timesteps so far: {total_num_timesteps}")
negative_eval_returns = [-r for r in eval_returns]
es.tell(solutions, negative_eval_returns)
es.logger.add() # write data to disc to be plotted
es.disp()
toc = time.time()
logger.log(f"####################################CMA took {toc-tic} seconds")
es_logger = es.logger
if not hasattr(es_logger, 'xmean'):
es_logger.load()
n_comp_used = first_2_pcs.shape[0]
optimization_path_mean = np.vstack((starting_coord, es_logger.xmean[:,5:5+n_comp_used]))
dump_rows_write_csv(cma_intermediate_data_dir, optimization_path_mean, "opt_mean_path")
plot_dir = get_plot_dir(cma_args)
cma_plot_dir = get_cma_plot_dir(plot_dir, cma_args.n_comp_to_use, cma_run_num, origin=origin)
if not os.path.exists(cma_plot_dir):
os.makedirs(cma_plot_dir)
ret_plot_name = f"cma return on {cma_args.n_comp_to_use} dim space of real pca plane, " \
f"explained {np.sum(final_pca.explained_variance_ratio_[:2])}"
plot_cma_returns(cma_plot_dir, ret_plot_name, mean_rets, min_rets, max_rets, show=False)
assert proj_coords.shape[1] == 2
xcoordinates_to_eval, ycoordinates_to_eval = gen_subspace_coords(cma_args, np.vstack((proj_coords, optimization_path_mean)).T)
from stable_baselines.ppo2.run_mujoco import eval_return
thetas_to_eval = [origin_param + x * first_2_pcs[0] + y * first_2_pcs[1] for y in ycoordinates_to_eval for x in
xcoordinates_to_eval]
tic = time.time()
eval_returns = Parallel(n_jobs=-1, max_nbytes='100M') \
(delayed(eval_return)(cma_args, save_dir, theta, cma_args.eval_num_timesteps, i) for (i, theta) in
enumerate(thetas_to_eval))
toc = time.time()
logger.log(f"####################################1st version took {toc-tic} seconds")
plot_contour_trajectory(cma_plot_dir, f"cma redo___{origin}_origin_eval_return_contour_plot", xcoordinates_to_eval,
ycoordinates_to_eval, eval_returns, proj_coords[:, 0], proj_coords[:, 1],
final_pca.explained_variance_ratio_,
num_levels=25, show=False, sub_alg_path=optimization_path_mean.T)
opt_mean_path_in_old_basis = [mean_projected_param.dot(first_2_pcs) + mean_param for mean_projected_param in optimization_path_mean]
distance_to_final = [LA.norm(opt_mean - final_param, ord=2) for opt_mean in opt_mean_path_in_old_basis]
distance_to_final_plot_name = f"cma redo distance_to_final over generations "
plot_2d(cma_plot_dir, distance_to_final_plot_name, np.arange(len(distance_to_final)), distance_to_final, "num generation", "distance_to_final", False)
# plot_3d_trajectory(cma_plot_dir, "end_point_origin_eval_return_3d_plot", xcoordinates_to_eval, ycoordinates_to_eval,
# eval_returns, proj_xcoord, proj_ycoord,
# result["explained_variance_ratio"][:2],
# num_levels=15, show=False)
if __name__ == '__main__':
main()
#TODO Give filenames more info to identify which hyperparameter is the data for
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
import tublatexmaker.latex_creater as convert
dict_of_entries = {
"(Bahth fī) uṣūl al-fiqh": {
"displaytitle": "",
"exists": "1",
"fulltext": "(Bahth fī) uṣūl al-fiqh",
"fullurl": "http://144.173.140.108:8080/tub/index.php/(Bahth_f%C4%AB)_u%E1%B9%A3%C5%ABl_al-fiqh",
"namespace": 0,
"printouts": {
"Title (Arabic)": ["بحث في) أصول " "الفقه)"],
"Title (transliterated)": ["(Bahth " "fī) " "uṣūl " "al-fiqh"],
"Has author(s)": [{"fulltext": "Muḥammad Jawād b. Aḥmad"}],
"Death (Hijri)": [1299],
"Death (Gregorian)": [{"timestamp": "-2776982400", "raw": "1/1882"}],
"Death (Hijri) text": ["13th century"],
"Death (Gregorian) text": ["19th century"],
},
}
}
edition = [
{
"City": [
{
"fulltext": "Qum",
"fullurl": "http://144.173.140.108:8080/tub/index.php/Qum",
"namespace": 0,
"exists": "1",
"displaytitle": "Qom",
}
],
"Edition type": ["Modern print"],
"Has a publisher": ["Majmaʿ al-Fikr al-Islāmī"],
"Has editor(s)": ["unknown"],
"Published edition of title": [
{
"fulltext": "al-Fawāʾid al-Ḥāʾiriyya",
"fullurl": "http://144.173.140.108:8080/tub/index.php/al-Faw%C4%81%CA%BEid_al-%E1%B8%A4%C4%81%CA%BEiriyya",
"namespace": 0,
"exists": "1",
"displaytitle": "",
}
],
"Sort title": ["al-Fawaid al-Ḥairiyya"],
"Title (Arabic)": ["الفوائد الحائرية"],
"Title (transliterated)": ["al-Fawāʾid al-Ḥāʾiriyya"],
"Year (Gregorian)": [],
"Year (Gregorian) text": [],
"Year (Hijri)": [],
"Year (Hijri) text": [],
"page_name": "Edition:al-Fawāʾid al-Ḥāʾiriyya",
},
{
"City": [
{
"fulltext": "Qum",
"fullurl": "http://144.173.140.108:8080/tub/index.php/Qum",
"namespace": 0,
"exists": "1",
"displaytitle": "Qom",
}
],
"Edition type": ["Modern print"],
"Has a publisher": ["Majmaʿ al-Fikr al-Islāmī"],
"Has editor(s)": ["unknown"],
"Published edition of title": [
{
"fulltext": "al-Fawāʾid al-Ḥāʾiriyya",
"fullurl": "http://144.173.140.108:8080/tub/index.php/al-Faw%C4%81%CA%BEid_al-%E1%B8%A4%C4%81%CA%BEiriyya",
"namespace": 0,
"exists": "1",
"displaytitle": "",
}
],
"Sort title": ["al-Fawaid al-Ḥairiyya"],
"Title (Arabic)": ["الفوائد الحائرية"],
"Title (transliterated)": ["al-Fawāʾid al-Ḥāʾiriyya"],
"Year (Gregorian)": [],
"Year (Gregorian) text": [],
"Year (Hijri)": [],
"Year (Hijri) text": [],
"page_name": "Edition:al-Fawāʾid al-Ḥāʾiriyya (1415/1994)",
},
]
def create_expected_latex(transliterated_title: str, arabic_title: str) -> str:
return f"""
\\item \\textbf{{{transliterated_title}}}
{arabic_title}
\\newline
Muḥammad b. Faraj al-Ḥimyarī al-Najafī
\\newline
(1059/1649)
\\newline
\\newline
\\textbf{{Description}}
\\newline
A short one-line description.
\\newline
\\newline
\\textbf{{Principle manuscripts}}
\\newline
This manuscript
\\newline
\\newline
\\textbf{{Editions}}
\\newline
This edition.
\\newline
\\newline
\\textbf{{Commentaries}}
\\newline
This commentary.
\\newline
"""
"""
def test_convert_to_entry():
entry_values = list(dict_of_entries.values())[0]["printouts"]
expected = create_expected_latex("(Bahth fī) uṣūl al-fiqh", "بحث في) أصول الفقه)")
assert convert._make_entry(entry_values) == expected
"""
def test_create_dates():
entry = {
"Death (Hijri)": [1299],
"Death (Gregorian)": [{"timestamp": "-2776982400", "raw": "1/1882"}],
"Death (Hijri) text": ["13th century"],
"Death (Gregorian) text": ["19th century"],
}
assert convert._create_dates(entry) == "(13th century/19th century)"
def test_make_editions():
assert (
convert._make_editions_section(edition)
== """
\\textbf{Editions}\n\\begin{itemize}
\\item
\\end{itemize}\n
"""
)
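# A hedged sketch of what latex_creater._create_dates could look like, inferred
# purely from the expectation in test_create_dates above (not the library's code):
#
#     def _create_dates(entry):
#         hijri = entry["Death (Hijri) text"][0]
#         gregorian = entry["Death (Gregorian) text"][0]
#         return f"({hijri}/{gregorian})"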
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
from typing import Sequence
import numpy as np
import xarray
from xarray import DataArray
from xclim.indices.run_length import rle_1d
def get_longest_run_start_index(
arr: DataArray,
window: int = 1,
dim: str = "time",
) -> DataArray:
return xarray.apply_ufunc(
get_index_of_longest_run,
arr,
input_core_dims=[[dim]],
kwargs={"window": window},
vectorize=True,
dask="parallelized",
output_dtypes=[float],
)
def get_index_of_longest_run(arr: Sequence[bool], window: int = 1) -> int:
values, rl, pos = rle_1d(arr)
if not np.any(values) or np.all(values * rl < window): # type:ignore
return 0
index_of_max = np.nanargmax(
np.where(values * rl >= window, rl, np.NaN) # type:ignore
)
return pos[index_of_max] # type:ignore
def get_first_occurrence_index(da: DataArray) -> DataArray:
"""
Return the index of the first True value in the 3D booleans array along
time dimension.
"""
stacked = da.stack(latlon=("lat", "lon"))
res = stacked.argmax("time")
return res.unstack()
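# Example usage (illustrative; assumes a boolean DataArray indexed by 'time'):
#
#     import pandas as pd
#     da = DataArray(
#         np.array([False, True, True, True, False]),
#         dims="time",
#         coords={"time": pd.date_range("2000-01-01", periods=5)},
#     )
#     get_longest_run_start_index(da, window=2)  # -> 1.0, start of the 3-long run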
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
# Generated by Django 2.2.7 on 2019-11-20 17:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('notes', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='note',
name='media',
field=models.ImageField(help_text='Optional image to add to note.', upload_to='media'),
),
]
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import abstractmethod
class Sampler(object):
"""Base class for all Samplers. __iter__ is needed no matter whether you use IterableSampler
or Squential sampler, if you want implement your own sampler, make clear what the type is
your Dataset, if IterableDataset(method __iter__ implemented), try to use IterableSampler,
else if you have an IndexDataset(method __getitem__ implemented), your dataset should have
method __len__ implemented.
"""
def __init__(self, data_source):
pass
@abstractmethod
def __iter__(self):
raise NotImplementedError
class IterableSampler(Sampler):
"""Interally samples elements, used for datasets retrieved element by interator.
yield None to act as a placeholder for each iteration
Args:
dataset (Dataset): set to None
"""
def __init__(self):
super(IterableSampler, self).__init__(None)
def __iter__(self):
while True:
yield None
def __len__(self):
return 0
class SequentialSampler(Sampler):
"""Sequentially samples elements, used for datasets retrieved element by index.
Args:
dataset (Dataset): index dataset(implement method __len__) for sampling
"""
def __init__(self, dataset):
self.dataset = dataset
def __iter__(self):
return iter(range(len(self.dataset)))
def __len__(self):
return len(self.dataset)
class BatchSampler(Sampler):
"""yield a mini-batch of indices for SquentialSampler and batch size length of None list for
IterableSampler.
Args:
sampler (Sampler): sampler used for generating batches.
batch_size (int): Size of mini-batch.
drop_last (bool): BatchSampler will drop the last batch if drop_last is True, else
will return the last batch whose size will be less than batch_size
"""
    def __init__(self, sampler, batch_size, drop_last=True):
        if not isinstance(drop_last, bool):
            raise ValueError("drop_last only supports bool as input")
        self.sampler = sampler
        self.batch_size = batch_size
        self.drop_last = drop_last
def __iter__(self):
batch = []
for idx in self.sampler:
batch.append(idx)
if len(batch) == self.batch_size:
yield batch
batch = []
if len(batch) > 0 and not self.drop_last:
yield batch
def __len__(self):
if self.drop_last:
return len(self.sampler) // self.batch_size
else:
return (len(self.sampler) + self.batch_size - 1) // self.batch_size
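# Minimal usage sketch (added for illustration): batching indices for an
# index-style dataset and dropping the final short batch.
if __name__ == "__main__":
    dataset = list(range(10))  # any object implementing __len__ works here
    batches = BatchSampler(SequentialSampler(dataset), batch_size=4, drop_last=True)
    print(list(batches))  # [[0, 1, 2, 3], [4, 5, 6, 7]]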
| [def_use_chains: [start, end] character-offset pairs for the code above; full pretty-printed array omitted] |
from __future__ import annotations
import shutil
from betfairlightweight.resources.streamingresources import MarketDefinition
from betfairlightweight.resources.bettingresources import MarketCatalogue, MarketBook
from betfairlightweight.streaming.listener import StreamListener
import sqlalchemy
from sqlalchemy.sql.expression import ColumnElement
from sqlalchemy.sql.selectable import CTE
from sqlalchemy import create_engine, func, DECIMAL
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.dialects.postgresql import base as psqlbase
from sqlalchemy.dialects.postgresql import json as psqljson
from sqlalchemy.sql.functions import sum as sql_sum
from sqlalchemy_filters.filters import Operator as SqlOperator
from sqlalchemy.orm.query import Query
from queue import Queue
import logging
from typing import Optional, Dict, List, Callable, Any, Tuple, Union, Literal, TypedDict
from os import path
import os
from datetime import datetime, timedelta
import zlib
import yaml
import json
import sys
import dateparser
from myutils import dictionaries, registrar
from ..exceptions import DBException
from .dbfilter import DBFilterHandler
active_logger = logging.getLogger(__name__)
active_logger.setLevel(logging.INFO)
ProcessorKey = Literal['process_in', 'process_out', 'processors']
ProcessorMap = Dict[type, Dict[ProcessorKey, List[str]]]
Processor = Callable[[Any], Any]
db_processors = registrar.Registrar[Processor]()
DB_PROCESSORS: ProcessorMap = {
psqlbase.BYTEA: {
'process_in': [
'prc_compress'
],
'process_out': [
'prc_decompress',
]
},
}
CACHE_PROCESSORS: ProcessorMap = {
psqlbase.BYTEA: {
'process_in': [
'prc_str_encode',
],
'process_out': [
'prc_str_decode'
]
},
psqlbase.TIMESTAMP: {
'process_in': [
'prc_dt_from_str',
],
'process_out': [
'prc_dt_to_str'
]
},
psqlbase.INTERVAL: {
'process_in': [
'prc_td_from_float',
],
'process_out': [
'prc_td_to_float'
]
},
psqljson.JSON: {
'process_in': [
'prc_json_decode',
],
'process_out': [
'prc_json_encode'
]
}
}
@db_processors.register_element
def prc_str_to_dt(data):
return dateparser.parse(data, settings={'DATE_ORDER': 'DMY'}) # use UK day-month-year instead of US month-day-year
@db_processors.register_element
def prc_compress(data):
return zlib.compress(data)
@db_processors.register_element
def prc_decompress(data):
return zlib.decompress(data)
@db_processors.register_element
def prc_str_encode(data):
return data.encode()
@db_processors.register_element
def prc_str_decode(data):
return data.decode()
@db_processors.register_element
def prc_td_to_float(data: timedelta):
return data.total_seconds()
@db_processors.register_element
def prc_td_from_float(data):
return timedelta(seconds=data)
@db_processors.register_element
def prc_dt_from_str(data):
return datetime.fromisoformat(data)
@db_processors.register_element
def prc_dt_to_str(data):
return data.isoformat()
@db_processors.register_element
def prc_json_encode(data):
return json.dumps(data)
@db_processors.register_element
def prc_json_decode(data):
return json.loads(data)
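# The registrar lookup used later in this module (db_processors[name]) resolves
# a processor by the name it was registered under, e.g. (illustrative only):
#
#     prc = db_processors['prc_json_encode']
#     prc({'market_id': '1.234'})  # -> '{"market_id": "1.234"}'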
class DBBase:
def __init__(
self,
db_lang=None,
db_user=None,
db_host=None,
db_port=None,
db_name=None,
db_pwd=None,
db_engine=None,
col_processors=None,
engine_kwargs=None
):
self.col_prcs = col_processors or DB_PROCESSORS
self.Base = automap_base()
engine_kwargs = engine_kwargs or {} # TODO - remove?
engine_str = f'+{db_engine}' if db_engine else ''
url = f'{db_lang}{engine_str}://{db_user}:{db_pwd}@{db_host}:{db_port}/{db_name}'
# prioritise engine kwargs if provided - "url" key will override constructed if provided
engine_kwargs = {'url': url} | engine_kwargs
active_logger.info(f'connecting to database with kwargs:\n{engine_kwargs}')
self.engine = create_engine(**engine_kwargs)
self.Base.prepare(self.engine, reflect=True)
self.session = Session(self.engine)
self.tables: Dict[str, Table] = self.Base.metadata.tables
active_logger.info(f'tables found: {list(self.tables.keys())}')
def _validate_tbl(self, tbl_name: str):
if tbl_name not in self.tables:
raise DBException(f'error inserting row, table "{tbl_name}" not found in tables')
if tbl_name not in self.Base.classes:
raise DBException(f'error inserting row, table "{tbl_name}" not found in base')
def _validate_cols(self, tbl_name: str, cols: List[str]):
for col in cols:
if col not in self.tables[tbl_name].columns:
raise DBException(f'column "{col}" not found in table "{tbl_name}"')
def _validate_pkeys(self, tbl_nm: str, pkey_flts: Dict):
tbl_pkeys = tuple(x.name for x in self.tables[tbl_nm].primary_key)
flt_pkeys = tuple(pkey_flts.keys())
if tbl_pkeys != flt_pkeys:
raise DBException(
f'error writing cache, table primary keys "{tbl_pkeys}" does not match specified "{flt_pkeys}"'
)
def apply_basic_filters(self, tbl_nm: str, pkey_flts: Dict) -> Query:
return self.session.query(self.tables[tbl_nm]).filter(
*[self.tables[tbl_nm].columns[k] == v for k, v in pkey_flts.items()]
)
def row_exist(self, tbl_nm: str, pkey_flts: Dict) -> bool:
"""
Determine if row(s) exist in database for a given table
"""
return self.apply_basic_filters(tbl_nm, pkey_flts).count() >= 1
def _value_processors(self, value: Any, tbl_name: str, col: str, prcs: ProcessorMap, prc_type: ProcessorKey) -> Any:
col_type = type(self.tables[tbl_name].columns[col].type)
prc_nms = prcs.get(col_type, {}).get(prc_type)
if prc_nms:
if type(prc_nms) is not list:
raise DBException(f'processors "{prc_type}" for column "{col}" not list')
for i, prc_nm in enumerate(prc_nms):
prc_func = db_processors[prc_nm]
active_logger.info(f'running processor "{prc_type}" #{i}, "{prc_nm}" on column "{col}"')
value_out = prc_func(value)
value = value_out
return value
def _process_columns(self, data: Dict, tbl_name: str, prcs: ProcessorMap, prc_type: ProcessorKey) -> None:
self._validate_tbl(tbl_name)
self._validate_cols(tbl_name, list(data.keys()))
for col in data.keys():
val_in = data[col]
if val_in is None:
active_logger.warning(f'table "{tbl_name}", col "{col}" value is None, skipping processing')
else:
val_out = self._value_processors(val_in, tbl_name, col, prcs, prc_type)
data[col] = val_out
def insert_row(self, tbl_name: str, data: Dict):
active_logger.info(f'inserting row of information into table "{tbl_name}"')
active_logger.info(f'keys passed are:\n'
f'{yaml.dump([str(k) for k in data.keys()])}')
self._process_columns(data, tbl_name, self.col_prcs, 'process_in')
row = self.Base.classes[tbl_name](**data)
self.session.add(row)
self.session.commit()
def read_rows(self, tbl_nm: str, pkey_flts: Dict) -> List[Dict]:
active_logger.info(f'reading rows from table "{tbl_nm}" with filter "{pkey_flts}"')
self._validate_tbl(tbl_nm)
self._validate_pkeys(tbl_nm, pkey_flts)
if not self.row_exist(tbl_nm, pkey_flts):
raise DBException(f'row in table "{tbl_nm}" with filters "{pkey_flts}" does not exist')
sql_rows = self.apply_basic_filters(tbl_nm, pkey_flts).all()
rows = []
for row in sql_rows:
row_dict = {
str(k): v
for k, v in dict(row).items()
} # convert sqlalchemy key objects to str for yaml
self._process_columns(row_dict, tbl_nm, self.col_prcs, 'process_out')
rows.append(row_dict)
return rows
def read_row(self, tbl_nm: str, pkey_flts: Dict) -> Dict:
rows = self.read_rows(tbl_nm, pkey_flts)
if len(rows) != 1:
raise DBException(f'expected 1 row from table "{tbl_nm}" with filters "{pkey_flts}", got {len(rows)}')
return rows[0]
def delete_rows(self, tbl_nm: str, pkey_flts: Dict) -> int:
active_logger.info(f'deleting rows from table "{tbl_nm}" with filters: "{pkey_flts}"')
q = self.apply_basic_filters(tbl_nm, pkey_flts)
ret = q.delete(synchronize_session='fetch')
self.session.commit()
return ret
def order_query(self, query: Query, cols, order_col: str, order_asc: bool):
"""apply ordering based on column of cte"""
if order_col not in cols:
raise DBException(f'cannot order by column "{order_col}", does not exist in CTE')
order_func = sqlalchemy.asc if order_asc else sqlalchemy.desc
return query.order_by(order_func(cols[order_col]))
class DBCache(DBBase):
def __init__(self, cache_root, cache_processors=None, **kwargs):
super().__init__(**kwargs)
self.cache_root = path.abspath(path.expandvars(cache_root))
if not path.isdir(self.cache_root):
active_logger.info(f'creating cache root directory at: "{self.cache_root}"')
os.makedirs(self.cache_root)
else:
active_logger.info(f'existing cache root directory found at: "{self.cache_root}"')
self.cache_prcs = cache_processors or CACHE_PROCESSORS
def cache_tbl(self, tbl_nm) -> str:
return path.join(self.cache_root, tbl_nm)
def cache_dir(self, tbl_nm: str, pkey_flts: Dict) -> str:
return path.join(self.cache_tbl(tbl_nm), *pkey_flts.values())
def cache_col(self, tbl_nm: str, pkey_flts: Dict, col: str) -> str:
return path.join(self.cache_dir(tbl_nm, pkey_flts), col)
def clear_cache(self, tbl_nm: str, pkey_flts: Dict):
active_logger.info(f'clearing cache from table "{tbl_nm}" with filters "{pkey_flts}"')
p = self.cache_dir(tbl_nm, pkey_flts)
if not path.exists(p):
active_logger.info(f'path "{p}" does not exist, skipping')
else:
if not path.isdir(p):
raise DBException(f'path "{p}" is not a directory')
active_logger.info(f'removing cache dir: "{p}"')
os.rmdir(p)
def write_to_cache(self, tbl_nm: str, pkey_flts: Dict, data: Dict):
self._validate_pkeys(tbl_nm, pkey_flts)
self._validate_tbl(tbl_nm)
d = self.cache_dir(tbl_nm, pkey_flts)
active_logger.info(f'writing cache to path: "{d}"')
if path.exists(d):
active_logger.info('path already exists, exiting...')
return
os.makedirs(d, exist_ok=True)
self._process_columns(data, tbl_nm, self.cache_prcs, 'process_out')
for k in pkey_flts.keys():
data.pop(k, None)
for col in data.keys():
if data[col] is None:
active_logger.warning(f'column "{col}" value is none, skipping')
else:
p = self.cache_col(tbl_nm, pkey_flts, col)
active_logger.info(f'writing column "{col}" to file: "{p}"')
with open(p, 'w') as f:
f.write(data[col])
def read_to_cache(self, tbl_nm: str, pkey_flts: Dict):
active_logger.info(f'reading table "{tbl_nm}" row to cache with filters "{pkey_flts}"')
data = self.read_row(tbl_nm, pkey_flts)
self.write_to_cache(tbl_nm, pkey_flts, data)
def insert_from_cache(self, tbl_nm, pkey_flts: Dict):
active_logger.info(f'insert row to table "{tbl_nm}" from cache with filters "{pkey_flts}"')
self._validate_pkeys(tbl_nm, pkey_flts)
self._validate_tbl(tbl_nm)
d = self.cache_dir(tbl_nm, pkey_flts)
active_logger.info(f'getting files from cache directory: "{d}"')
if not path.isdir(d):
raise DBException(f'expected to be directory: "{d}"')
data = pkey_flts.copy()
_, _, files = next(os.walk(d))
self._validate_cols(tbl_nm, files) # files should match column names
for fnm in files:
fp = self.cache_col(tbl_nm, pkey_flts, fnm)
active_logger.info(f'reading column data from file: "{fp}"')
with open(fp, 'r') as f:
data[fnm] = f.read()
self._process_columns(data, tbl_nm, self.cache_prcs, 'process_in')
self.insert_row(tbl_nm, data)
def _cache_pkeys(self, tbl_nm: str):
"""
get list of primary key filters from nested dirs in cache
"""
pkey_names = tuple(x.name for x in self.tables[tbl_nm].primary_key)
def _get_pkeys(_dir: str, _base_pkey: Dict, _lvl) -> List:
if not path.isdir(_dir):
return []
_, dirnames, _ = next(os.walk(_dir))
return [_base_pkey | {pkey_names[_lvl]: d} for d in dirnames]
lvl = 0
flts = [{}]
while lvl < len(pkey_names):
flts_out = []
for f in flts:
d = self.cache_dir(tbl_nm, f)
flts_out += _get_pkeys(d, f, lvl)
flts = flts_out
lvl += 1
return flts
def scan_cache(self, tbl_nm: str, post_insert: Optional[Callable[[str, Dict], None]] = None) -> List[Dict]:
tbl_root = self.cache_tbl(tbl_nm)
active_logger.info(f'scanning for cached rows for table "{tbl_nm}" to insert in "{tbl_root}"')
flts = self._cache_pkeys(tbl_nm)
added_pkeys = []
for pkey_filters in flts:
if self.row_exist(tbl_nm, pkey_filters):
active_logger.info(f'row "{pkey_filters}" already exists in database, skipping...')
else:
self.insert_from_cache(tbl_nm, pkey_filters)
added_pkeys.append(pkey_filters)
if post_insert is not None:
post_insert(tbl_nm, pkey_filters)
return added_pkeys
def wipe_cache(self) -> Tuple[int, int]:
active_logger.info(f'clearing cache root at "{self.cache_root}"')
_, dirnames, filenames = next(os.walk(self.cache_root))
for fnm in filenames:
p = path.join(self.cache_root, fnm)
os.remove(p)
for dnm in dirnames:
p = path.join(self.cache_root, dnm)
shutil.rmtree(p)
return len(filenames), len(dirnames)
class QueryFilter(TypedDict):
value: object
field: str
op: str
def apply_filter_spec(tbl: Table, q: Query, filters_spec: List[QueryFilter]) -> Query:
"""sqlalchemy_filters `apply_filters` function doesn't work with Sqlalchemy V1.14 so i've bodged it myself until
they sort it out"""
conditions = [
SqlOperator.OPERATORS[f['op']](tbl.columns[f['field']], f['value'])
for f in filters_spec
]
return q.filter(*conditions)
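# Example filter spec accepted by apply_filter_spec (the operator key must be
# one of sqlalchemy_filters' Operator.OPERATORS names; '==' is assumed here):
#
#     flts: List[QueryFilter] = [{'field': 'sport_id', 'op': '==', 'value': '7'}]
#     q = apply_filter_spec(tbl, q, flts)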
class BettingDB:
"""
Betting database handler
    Manages a session that connects to a remote SQL database for querying
    "Historic" markets are files downloaded directly from betfair's historical data website
"Recorded" markets are files from betfair markets recorded through a python script locally, which are recorded
with the accompanying market catalogue file
"""
def __init__(self, **kwargs):
self._dbc = DBCache(**kwargs)
def read(self, tbl_nm: str, pkey_flts: Dict):
return self._dbc.read_row(tbl_nm, pkey_flts)
def close(self):
self._dbc.session.close()
def meta_serialise(self, market_info: Dict) -> None:
"""run caching serialisation on market information retrieved from 'marketmeta' database"""
self._dbc._process_columns(market_info, 'marketmeta', self._dbc.cache_prcs, 'process_out')
def meta_de_serialise(self, market_info: Dict) -> None:
"""run caching de-serialisation on market information that has been serialised"""
self._dbc._process_columns(market_info, 'marketmeta', self._dbc.cache_prcs, 'process_in')
@staticmethod
def get_meta(first_book: MarketBook, cat: MarketCatalogue = None) -> Dict:
"""
Get metadata corresponding to the "Meta" table in the betting database for a given betfair Market
Parameters
----------
first_book : first MarketBook for betfair Market
cat : if market is recorded and not historic, this needs to be passed to get
venue and runner names
Returns dict of metadata
-------
"""
mktdef: MarketDefinition = first_book.market_definition
mktid = first_book.market_id
init_time = first_book.publish_time
pre_off = mktdef.market_time - init_time
metadata = {
'market_id': mktid,
'sport_id': mktdef.event_type_id,
'market_time': mktdef.market_time,
'market_type': mktdef.market_type,
'betting_type': mktdef.betting_type,
'country_code': mktdef.country_code,
'event_id': mktdef.event_id,
'event_name': mktdef.event_name, # historical
'timezone': mktdef.timezone,
'venue': mktdef.venue,
'init_time': init_time,
'pre_off': pre_off,
'format': 'historic',
}
if cat is not None:
metadata['event_name'] = cat.event.name
metadata['venue'] = cat.event.venue
metadata['format'] = 'recorded'
return metadata
@staticmethod
def get_first_book(file_path: str) -> Optional[MarketBook]:
"""
read the first line in a historical/streaming file and get the MarketBook parsed object, without reading or
processing the rest of the file
"""
with open(file_path) as f:
l = f.readline()
q = Queue()
# stop it whinging about stream latency by using infinity as max latency
listener = StreamListener(q, max_latency=sys.float_info.max)
listener.register_stream(0, 'marketSubscription')
listener.on_data(l)
return listener.output_queue.get()[0]
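# Example usage (a sketch; the file path is hypothetical):
# bk = BettingDB.get_first_book('/tmp/markets/1.23456789')
# bk.market_definition then carries the fields consumed by get_meta() above.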
def insert_market_meta(self, market_id: str):
active_logger.info(f'creating metadata database entry for market "{market_id}"')
pkey_flts = {'market_id': market_id}
self._dbc.read_to_cache('marketstream', pkey_flts)
stream_path = self._dbc.cache_col('marketstream', pkey_flts, 'stream_updates')
bk = self.get_first_book(stream_path)
cat = None
cat_path = self._dbc.cache_col('marketstream', pkey_flts, 'catalogue')
if path.exists(cat_path):
if path.getsize(cat_path):
with open(cat_path, 'r') as f:
cat_dict = json.loads(f.read())
try:
cat = MarketCatalogue(**cat_dict)
except TypeError as e:
raise DBException(f'failed to create market catalogue: {e}')
if cat is None:
names = {r.selection_id: r.name for r in bk.market_definition.runners}
else:
names = {r.selection_id: r.runner_name for r in cat.runners}
for runner_id, name in names.items():
active_logger.info(f'creating row for market "{market_id}", runner "{runner_id}", name "{name}"')
self._dbc.insert_row('marketrunners', {
'market_id': market_id,
'runner_id': runner_id,
'runner_name': name
})
meta_data = self.get_meta(bk, cat)
self._dbc.insert_row('marketmeta', meta_data)
def insert_strategy_runners(self, pkey_filters, profit_func: Callable[[str], Dict]):
p = self._dbc.cache_col('strategyupdates', pkey_filters, 'strategy_updates')
if not path.isfile(p):
raise DBException(f'expected strategy update file at "{p}"')
runner_profits = profit_func(p)
for k, v in runner_profits.items():
self._dbc.insert_row('strategyrunners', pkey_filters | {
'runner_id': k,
'profit': v
})
def wipe_cache(self) -> Tuple[int, int]:
return self._dbc.wipe_cache()
def scan_mkt_cache(self) -> List[Dict]:
"""
scan marketstream cache files - insert them into the database if they don't exist, and add corresponding marketmeta and runner rows
"""
def mkt_post_insert(tbl_name, pkey_flts):
if tbl_name != 'marketstream':
raise DBException(f'expected "marketstream" table')
self.insert_market_meta(pkey_flts['market_id'])
return self._dbc.scan_cache('marketstream', mkt_post_insert)
def scan_strat_cache(self, profit_func: Callable[[str], Dict]) -> List[Dict]:
"""
scan strategy cache files - insert them into the database if they don't exist
"""
def strat_post_insert(tbl_nm, pkey_flts):
self.insert_strategy_runners(pkey_flts, profit_func)
added_keys = self._dbc.scan_cache('strategymeta')
self._dbc.scan_cache('strategyupdates', strat_post_insert)
return added_keys
def write_strat_info(self, strategy_id, type: str, name: str, exec_time: datetime, info: dict):
data = {
'type': type,
'name': name,
'exec_time': exec_time,
'info': info
}
self._dbc.write_to_cache(
tbl_nm='strategymeta',
pkey_flts={
'strategy_id': str(strategy_id)
},
data=data
)
def path_mkt_usr_updates(self, market_id) -> str:
return self._dbc.cache_col(
tbl_nm='marketstream',
pkey_flts={
'market_id': market_id
},
col='user_data'
)
def path_mkt_cat(self, market_id) -> str:
return self._dbc.cache_col(
tbl_nm='marketstream',
pkey_flts={
'market_id': market_id
},
col='catalogue',
)
def path_mkt_updates(self, market_id) -> str:
return self._dbc.cache_col(
tbl_nm='marketstream',
pkey_flts={
'market_id': market_id
},
col='stream_updates',
)
def path_strat_features(self, market_id, strategy_id) -> str:
return self._dbc.cache_col(
tbl_nm='strategyupdates',
pkey_flts={
'strategy_id': str(strategy_id),
'market_id': market_id,
},
col='strategy_features'
)
def path_strat_updates(self, market_id, strategy_id) -> str:
return self._dbc.cache_col(
tbl_nm='strategyupdates',
pkey_flts={
'strategy_id': str(strategy_id),
'market_id': market_id
},
col='strategy_updates'
)
def paths_market_updates(self, filter_spec: List[QueryFilter], limit=200):
tbl = self._dbc.tables['marketmeta']
q = self._dbc.session.query(tbl)
q_flt = apply_filter_spec(tbl, q, filter_spec)
rows = q_flt.limit(limit).all()
update_paths = []
for row in rows:
mkt_flt = {'market_id': row.market_id}
self._dbc.read_to_cache('marketstream', mkt_flt)
p = self._dbc.cache_col('marketstream', mkt_flt, 'stream_updates')
if not path.isfile(p):
raise DBException(f'expected file at stream update path: "{p}"')
update_paths.append(p)
return update_paths
def rows_runners(self, market_id, strategy_id) -> List[Dict]:
"""
get filtered rows of runners, joined with the profit column from the strategy table
"""
sr = self._dbc.tables['strategyrunners']
cte_strat = self._dbc.session.query(
sr.columns['runner_id'],
sr.columns['profit'].label('runner_profit')
).filter(
sr.columns['strategy_id'] == strategy_id,
sr.columns['market_id'] == market_id
).cte()
rn = self._dbc.tables['marketrunners']
rows = self._dbc.session.query(
rn,
cte_strat.c['runner_profit'],
).join(
cte_strat,
rn.columns['runner_id'] == cte_strat.c['runner_id'],
isouter=True,
).filter(
rn.columns['market_id'] == market_id
).all()
return [dict(row) for row in rows]
def rows_market(self, cte, col_names, max_rows, order_col=None, order_asc=False) -> List[Dict]:
cols = [cte.c[nm] for nm in col_names]
q = self._dbc.session.query(*cols)
if order_col is not None:
q = self._dbc.order_query(q, cte.c, order_col, order_asc)
rows = q.limit(max_rows).all()
return [dict(row) for row in rows]
# TODO - implement in UI
def rows_strategy(self, max_rows) -> List[Dict]:
shn = self._dbc.session
sm = self._dbc.tables['strategymeta']
sr = self._dbc.tables['strategyrunners']
p_cte = shn.query(
sr.columns['strategy_id'],
func.sum(sr.columns['profit']).label('total_profit')
).group_by(sr.columns['strategy_id']).cte()
m_cte = shn.query(sr.c['strategy_id'], sr.c['market_id']).distinct().cte()
m_cte = shn.query(
m_cte.c['strategy_id'],
func.count(m_cte.c['market_id']).label('n_markets')
).group_by(m_cte.c['strategy_id']).cte()
q = shn.query(sm, p_cte.c['total_profit'], m_cte.c['n_markets']).join(
p_cte, sm.c['strategy_id'] == p_cte.c['strategy_id'], isouter=True
).join(
m_cte, sm.c['strategy_id'] == m_cte.c['strategy_id'], isouter=True
)
return [dict(row) for row in q.limit(max_rows).all()]
def filters_labels(self, filters: DBFilterHandler, cte) -> List[List[Dict[str, Any]]]:
return filters.filters_labels(self._dbc.session, self._dbc.tables, cte)
def cte_count(self, cte: CTE) -> int:
return self._dbc.session.query(cte).count()
def strategy_count(self) -> int:
return self._dbc.session.query(self._dbc.tables['strategymeta']).count()
def strategy_delete(self, strategy_id) -> Tuple[int, int, int]:
strategy_id = str(strategy_id)
active_logger.info(f'attempting to delete strategy: "{strategy_id}"')
pkey_flt = {'strategy_id': strategy_id}
if not self._dbc.row_exist('strategymeta', pkey_flt):
raise DBException(f'strategy does not exist, using filters: "{pkey_flt}"')
if not strategy_id:
raise DBException('trying to delete strategy where the ID passed is blank!')
rows = self._dbc.read_rows('strategymeta', pkey_flt)
if len(rows) != 1:
raise DBException(f'expected 1 strategy meta row with filter: "{pkey_flt}"')
n_runners = self._dbc.delete_rows('strategyrunners', pkey_flt)
active_logger.info(f'deleted {n_runners} rows from "strategyrunners" table')
n_mkts = self._dbc.delete_rows('strategyupdates', pkey_flt)
active_logger.info(f'deleted {n_mkts} rows from "strategyupdates" table')
n_meta = self._dbc.delete_rows('strategymeta', pkey_flt)
active_logger.info(f'deleted {n_meta} rows from "strategymeta" table')
return n_meta, n_mkts, n_runners
def filters_strat_cte(self, strat_filters: DBFilterHandler) -> CTE:
"""
get filtered database strategy common table expression (CTE)
"""
strat_meta = self._dbc.tables['strategymeta']
q = self._dbc.session.query(strat_meta).filter(
*strat_filters.filters_conditions(strat_meta)
)
return q.cte()
def filters_mkt_cte(self, strategy_id, column_filters: List[ColumnElement]) -> CTE:
meta = self._dbc.tables['marketmeta']
sr = self._dbc.tables['strategyrunners']
if strategy_id:
strat_cte = self._dbc.session.query(
sr.columns['market_id'],
sql_sum(sr.columns['profit']).label('market_profit')
).filter(
sr.columns['strategy_id'] == strategy_id
).group_by(
sr.columns['market_id']
).cte()
q = self._dbc.session.query(
meta,
strat_cte.c['market_profit']
).join(
strat_cte,
meta.columns['market_id'] == strat_cte.c['market_id']
)
else:
q = self._dbc.session.query(
meta,
sqlalchemy.null().label('market_profit')
)
q = q.filter(*column_filters)
return q.cte()
def cache_strat_updates(self, strategy_id, market_id):
pkey_flts = {
'strategy_id': str(strategy_id),
'market_id': market_id
}
self._dbc.read_to_cache('strategyupdates', pkey_flts)
def cache_strat_meta(self, strategy_id):
pkey_flt = {'strategy_id': strategy_id}
self._dbc.read_to_cache('strategymeta', pkey_flt)
def cache_mkt_stream(self, market_id):
pkey_flt = {'market_id': market_id}
self._dbc.read_to_cache('marketstream', pkey_flt)
def read_mkt_meta(self, market_id) -> Dict:
pkey_flt = {'market_id': market_id}
return self._dbc.read_row('marketmeta', pkey_flt)
def _lost_ids(self, t1: Table, t2, id_col: str):
"""
get a query for where table `t1` has rows that are not reflected in table `t2`, joined by a column with name
specified by `id_col`. table `t2` can be a 1-to-1 mapping of rows from `t1` or 1 to many.
E.g. if `t1` had an id column of 'sample_id_col' and some values [1,2,3], and `t2` had hundreds of rows but
only with 'sample_id_col' equal to 1 or 2, then the function would return the 'sample_id_col' value of 3
"""
cte = self._dbc.session.query(
t2.columns[id_col]
).group_by(t2.columns[id_col]).cte()
return self._dbc.session.query(
t1.columns[id_col],
cte.c[id_col]
).join(
cte,
t1.columns[id_col] == cte.c[id_col],
isouter=True
).filter(cte.c[id_col] == None)
def health_check(self):
mkt_stm = self._dbc.tables['marketstream']
mkt_met = self._dbc.tables['marketmeta']
mkt_run = self._dbc.tables['marketrunners']
# market stream/meta row counts
n_mkt = self._dbc.session.query(mkt_stm).count()
active_logger.info(f'{n_mkt} market stream rows')
n_met = self._dbc.session.query(mkt_met).count()
active_logger.info(f'{n_met} market meta rows')
# market stream rows without corresponding market meta row
q = self._lost_ids(mkt_stm, mkt_met, 'market_id')
for row in q.all():
active_logger.error(f'market "{row[0]}" does not have a meta row')
# market runner meta row count
nrun = self._dbc.session.query(mkt_run).count()
active_logger.info(f'{nrun} market runner rows')
# market stream rows without any corresponding runner rows
q = self._lost_ids(mkt_stm, mkt_run, 'market_id')
for row in q.all():
active_logger.error(f'market "{row[0]}" does not have any runner rows')
srt_met = self._dbc.tables['strategymeta']
srt_run = self._dbc.tables['strategyrunners']
srt_udt = self._dbc.tables['strategyupdates']
# strategy meta & strategy market update row counts
n_srtmet = self._dbc.session.query(srt_met).count()
active_logger.info(f'{n_srtmet} strategy meta rows found')
n_srtudt = self._dbc.session.query(srt_udt).count()
active_logger.info(f'{n_srtudt} strategy market update rows found')
# strategy meta rows without any strategy update rows
q = self._lost_ids(srt_met, srt_udt, 'strategy_id')
for row in q.all():
active_logger.error(f'strategy "{row[0]}" does not have any market updates')
# strategy runner row count
n_srtrun = self._dbc.session.query(srt_run).count()
active_logger.info(f'{n_srtrun} strategy runner rows found')
# strategy meta rows without any strategy runner rows
q = self._lost_ids(srt_met, srt_run, 'strategy_id')
for row in q.all():
active_logger.error(f'strategy "{row[0]}" does not have any runner rows')
| [
[
[
23,
34
]
],
[
[
43,
49
],
[
14911,
14917
]
],
[
[
110,
126
],
[
17076,
17092
]
],
[
[
185,
200
],
[
16634,
16649
],
[
19363,
19378
]
],
[
[
202,
212
],
[
16617,
16627
],
[
18102,
18112
]
],
[
[
263,
277
],
[
18479,
18493
]
],
[
[
285,
295
],
[
9365,
9375
],
[
9398,
9408
],
[
29155,
29165
]
],
[
[
334,
347
],
[
28356,
28369
]
],
[
[
386,
389
],
[
26575,
26578
],
[
27992,
27995
],
[
28375,
28378
]
],
[
[
413,
426
],
[
4348,
4361
]
],
[
[
428,
432
],
[
25684,
25688
],
[
25947,
25951
]
],
[
[
434,
441
]
],
[
[
469,
476
],
[
4455,
4462
]
],
[
[
511,
516
],
[
4507,
4512
],
[
15079,
15084
],
[
29984,
29989
]
],
[
[
552,
564
],
[
3867,
3879
]
],
[
[
608,
624
],
[
1550,
1558
],
[
1745,
1753
],
[
1903,
1911
],
[
2065,
2073
]
],
[
[
668,
684
],
[
2230,
2238
]
],
[
[
722,
736
],
[
28606,
28613
]
],
[
[
776,
799
],
[
15307,
15318
]
],
[
[
833,
838
],
[
5602,
5607
],
[
9117,
9122
],
[
15132,
15137
],
[
15089,
15094
]
],
[
[
857,
862
],
[
18371,
18376
]
],
[
[
870,
877
],
[
1240,
1247
],
[
1291,
1298
]
],
[
[
897,
905
],
[
13823,
13831
],
[
18093,
18101
]
],
[
[
907,
911
],
[
1387,
1391
],
[
1398,
1402
],
[
4497,
4501
],
[
5216,
5220
],
[
5593,
5597
],
[
5812,
5816
],
[
6721,
6725
],
[
7281,
7285
],
[
7743,
7747
],
[
7729,
7733
],
[
8546,
8550
],
[
8537,
8541
],
[
8817,
8821
],
[
10156,
10160
],
[
10289,
10293
],
[
10429,
10433
],
[
10934,
10938
],
[
10946,
10950
],
[
11866,
11870
],
[
12122,
12126
],
[
13877,
13881
],
[
13847,
13851
],
[
15948,
15952
],
[
16107,
16111
],
[
16366,
16370
],
[
16661,
16665
],
[
20231,
20235
],
[
20778,
20782
],
[
21298,
21302
],
[
21283,
21287
],
[
24185,
24189
],
[
25113,
25117
],
[
25472,
25476
],
[
26447,
26451
],
[
29847,
29851
],
[
13272,
13276
]
],
[
[
913,
917
],
[
1417,
1421
],
[
4982,
4986
],
[
7738,
7742
],
[
13872,
13876
],
[
15110,
15114
],
[
20773,
20777
],
[
21293,
21297
],
[
23492,
23496
],
[
24180,
24184
],
[
25108,
25112
],
[
25467,
25471
],
[
26437,
26441
],
[
26442,
26446
],
[
28351,
28355
],
[
13287,
13291
]
],
[
[
919,
927
],
[
1442,
1450
],
[
13832,
13840
],
[
20215,
20223
],
[
21267,
21275
]
],
[
[
929,
932
],
[
1452,
1455
],
[
1458,
1461
],
[
6104,
6107
],
[
6027,
6030
],
[
26457,
26460
]
],
[
[
934,
939
],
[
14564,
14569
],
[
20685,
20690
],
[
26806,
26811
]
],
[
[
941,
946
]
],
[
[
948,
955
],
[
1321,
1328
]
],
[
[
957,
966
],
[
14993,
15002
]
],
[
[
982,
986
],
[
9628,
9632
],
[
9641,
9645
],
[
9685,
9689
],
[
10072,
10076
],
[
10185,
10189
],
[
10328,
10332
],
[
10592,
10596
],
[
10712,
10716
],
[
11153,
11157
],
[
12446,
12450
],
[
14765,
14769
],
[
14867,
14871
],
[
19147,
19151
],
[
19185,
19189
],
[
20339,
20343
],
[
23965,
23969
],
[
13312,
13316
]
],
[
[
994,
996
],
[
9815,
9817
],
[
10868,
10870
],
[
11262,
11264
],
[
12586,
12588
],
[
14693,
14695
],
[
14809,
14811
],
[
13390,
13392
]
],
[
[
1018,
1026
],
[
3191,
3199
],
[
21744,
21752
]
],
[
[
1028,
1037
],
[
2977,
2986
],
[
3095,
3104
]
],
[
[
1045,
1049
],
[
2634,
2638
],
[
2725,
2729
]
],
[
[
1057,
1061
],
[
7451,
7455
]
],
[
[
1069,
1073
],
[
3379,
3383
],
[
3468,
3472
],
[
19287,
19291
]
],
[
[
1081,
1084
],
[
18509,
18512
]
],
[
[
1092,
1102
],
[
2456,
2466
]
],
[
[
1124,
1136
]
],
[
[
1138,
1147
],
[
1479,
1488
]
],
[
[
1173,
1184
],
[
4717,
4728
],
[
4857,
4868
],
[
5098,
5109
],
[
5395,
5406
],
[
6313,
6324
],
[
7993,
8004
],
[
8646,
8657
],
[
9268,
9279
],
[
10749,
10760
],
[
12479,
12490
],
[
19464,
19475
],
[
20373,
20384
],
[
24003,
24014
],
[
27073,
27084
],
[
27188,
27199
],
[
27362,
27373
],
[
21047,
21058
]
],
[
[
1207,
1222
],
[
26412,
26427
],
[
27972,
27987
]
],
[
[
1224,
1237
],
[
1268,
1281
],
[
4250,
4263
],
[
4550,
4563
],
[
6495,
6508
],
[
6999,
7012
],
[
7296,
7309
],
[
7380,
7393
],
[
7758,
7771
],
[
8839,
8852
],
[
9726,
9739
],
[
9870,
9883
],
[
10444,
10457
],
[
10620,
10633
],
[
10807,
10820
],
[
11090,
11103
],
[
11181,
11194
],
[
11515,
11528
],
[
11673,
11686
],
[
11881,
11894
],
[
12137,
12150
],
[
12366,
12379
],
[
12770,
12783
],
[
13934,
13947
],
[
14198,
14211
],
[
14589,
14602
],
[
18720,
18733
],
[
19771,
19784
],
[
26875,
26888
],
[
27512,
27525
],
[
27665,
27678
],
[
27812,
27825
],
[
31126,
31139
],
[
31241,
31254
],
[
31455,
31468
],
[
31626,
31639
],
[
31841,
31854
],
[
32202,
32215
],
[
32329,
32342
],
[
32560,
32573
],
[
32742,
32755
],
[
32966,
32979
]
],
[
[
1306,
1318
],
[
1403,
1415
],
[
6087,
6099
],
[
6772,
6784
]
],
[
[
1372,
1384
],
[
1529,
1541
],
[
1724,
1736
],
[
6063,
6075
],
[
6748,
6760
]
],
[
[
1430,
1439
],
[
1499,
1508
]
],
[
[
1463,
1476
],
[
2389,
2402
],
[
2568,
2581
],
[
2657,
2670
],
[
2750,
2763
],
[
2835,
2848
],
[
2920,
2933
],
[
3024,
3037
],
[
3122,
3135
],
[
3223,
3236
],
[
3310,
3323
],
[
3399,
3412
],
[
6457,
6470
]
],
[
[
1514,
1527
],
[
1529,
1541
],
[
3833,
3846
]
],
[
[
1706,
1722
],
[
1724,
1736
],
[
9999,
10015
]
],
[
[
2424,
2437
]
],
[
[
2603,
2615
]
],
[
[
2692,
2706
]
],
[
[
2785,
2799
]
],
[
[
2870,
2884
]
],
[
[
2955,
2970
]
],
[
[
3059,
3076
]
],
[
[
3157,
3172
]
],
[
[
3258,
3271
]
],
[
[
3345,
3360
]
],
[
[
3434,
3449
]
],
[
[
3493,
3499
],
[
9489,
9495
]
],
[
[
9481,
9488
],
[
15886,
15893
]
],
[
[
14981,
14992
],
[
15115,
15126
],
[
23497,
23508
]
],
[
[
15056,
15073
],
[
23625,
23642
]
],
[
[
15452,
15461
]
]
] |
"""rest_vk_api URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import url
from main import views
urlpatterns = [
url(r'^users/(?P<user_ids>[0-9]+).*', views.get_user, name='get_users'),
url(r'^status$', views.status, name='status'),
]
| [
[
[
664,
667
],
[
712,
715
],
[
789,
792
]
],
[
[
685,
690
],
[
750,
755
],
[
806,
811
]
],
[
[
692,
703
]
]
] |
#!/usr/bin/env python
from testWrt import testsetup
from testWrt.lib import SSHOpenWrt
if __name__ == "__main__":
ts = testsetup.create_generic()
device = SSHOpenWrt(hostname="192.168.1.1", password="test")
ret = device.portscan(22)
print(ret)
| [
[
[
43,
52
],
[
125,
134
]
],
[
[
77,
87
],
[
165,
175
]
],
[
[
120,
122
]
],
[
[
156,
162
],
[
227,
233
]
],
[
[
221,
224
],
[
257,
260
]
]
] |
import csv
import logging
from datetime import datetime, timedelta
from typing import Any, Dict, Optional
from scrapy import Spider
from sqlalchemy.dialects.postgresql import insert
from opennem.core.normalizers import normalize_duid
from opennem.db import SessionLocal, get_database_engine
from opennem.db.models.opennem import FacilityScada
from opennem.pipelines.nem.opennem import unit_scada_generate_facility_scada
from opennem.schema.network import NetworkWEM
from opennem.utils.dates import parse_date
from opennem.utils.pipelines import check_spider_pipeline
logger = logging.getLogger(__name__)
class WemStoreFacilityScada(object):
@check_spider_pipeline
def process_item(
self, item: Dict[str, Any], spider: Optional[Spider] = None
) -> Dict[str, Any]:
if "content" not in item:
logger.error("No item content slipping store facility scada")
return item
csvreader = csv.DictReader(item["content"].split("\n"))
item["table_schema"] = FacilityScada
item["update_fields"] = ["generated", "eoi_quantity"]
item["records"] = unit_scada_generate_facility_scada(
csvreader,
spider,
interval_field="Trading Interval",
facility_code_field="Facility Code",
power_field="EOI Quantity (MW)",
energy_field="Energy Generated (MWh)",
network=NetworkWEM,
)
item["content"] = None
return item
class WemStoreFacilityIntervals(object):
@check_spider_pipeline
def process_item(
self, item: Dict[str, Any], spider: Optional[Spider] = None
) -> Dict[str, Any]:
if "content" not in item:
logger.error("No item content slipping store facility scada")
return item
csvreader = csv.DictReader(item["content"].split("\n"))
item["table_schema"] = FacilityScada
item["update_fields"] = ["generated"]
item["records"] = unit_scada_generate_facility_scada(
csvreader,
spider,
interval_field="PERIOD",
facility_code_field="FACILITY_CODE",
power_field="ACTUAL_MW",
network=NetworkWEM,
)
item["content"] = None
return item
class WemStoreLiveFacilityScada(object):
"""
Store live facility scada data.
@NOTE no longer used
"""
@check_spider_pipeline
def process_item(self, item: Dict[str, Any], spider: Optional[Spider] = None) -> int:
session = SessionLocal()
engine = get_database_engine()
csvreader = csv.DictReader(item["content"].split("\n"))
records_to_store = []
last_asat = None
for row in csvreader:
# @TODO MAX_GEN_CAPACITY
# facility_capacity = row["MAX_GEN_CAPACITY"]
if row["AS_AT"] != "":
last_asat = parse_date(row["AS_AT"], network=NetworkWEM, dayfirst=False)
if not last_asat or type(last_asat) is not datetime:
logger.error("Invalid row or no datetime")
continue
# We need to pivot the table since columns are time intervals
for i in range(1, 48):
column = f"I{i:02}"
if column not in row:
logger.error("Do not have data for interval {}".format(column))
continue
if i > 0:
interval = last_asat - timedelta(minutes=(i - 1) * 30)
else:
interval = last_asat
facility_code = normalize_duid(row["FACILITY_CODE"])
val = None
try:
val = float(row[column]) / 2 or None
except ValueError:
pass
records_to_store.append(
{
"created_by": spider.name,
"network_id": "WEM",
"trading_interval": interval,
"facility_code": facility_code,
"eoi_quantity": val,
}
)
stmt = insert(FacilityScada).values(records_to_store)
stmt.bind = engine
stmt = stmt.on_conflict_do_update(
index_elements=["trading_interval", "network_id", "facility_code", "is_forecast"],
set_={
# "updated_by": stmt.excluded.created_by,
"eoi_quantity": stmt.excluded.eoi_quantity,
},
)
try:
session.execute(stmt)
session.commit()
except Exception as e:
logger.error("Error inserting records")
logger.error(e)
finally:
session.close()
return len(records_to_store)
| [
[
[
7,
10
],
[
941,
944
],
[
1822,
1825
],
[
2614,
2617
]
],
[
[
18,
25
],
[
579,
586
]
],
[
[
47,
55
],
[
3022,
3030
]
],
[
[
57,
66
],
[
3485,
3494
]
],
[
[
86,
89
],
[
782,
785
],
[
725,
728
],
[
1663,
1666
],
[
1606,
1609
],
[
2473,
2476
]
],
[
[
91,
95
],
[
772,
776
],
[
715,
719
],
[
1653,
1657
],
[
1596,
1600
],
[
2463,
2467
]
],
[
[
97,
105
],
[
739,
747
],
[
1620,
1628
],
[
2487,
2495
]
],
[
[
126,
132
],
[
748,
754
],
[
1629,
1635
],
[
2496,
2502
]
],
[
[
176,
182
],
[
4188,
4194
]
],
[
[
221,
235
],
[
3613,
3627
]
],
[
[
259,
271
],
[
2539,
2551
]
],
[
[
273,
292
],
[
2571,
2590
]
],
[
[
331,
344
],
[
1017,
1030
],
[
1898,
1911
],
[
4195,
4208
]
],
[
[
387,
421
],
[
1119,
1153
],
[
1984,
2018
]
],
[
[
457,
467
],
[
1410,
1420
],
[
2206,
2216
],
[
2938,
2948
]
],
[
[
500,
510
],
[
2905,
2915
]
],
[
[
547,
568
],
[
651,
672
],
[
1532,
1553
],
[
2408,
2429
]
],
[
[
570,
576
],
[
834,
840
],
[
1715,
1721
],
[
3048,
3054
],
[
3322,
3328
],
[
4682,
4688
],
[
4734,
4740
]
],
[
[
615,
636
]
],
[
[
1492,
1517
]
],
[
[
2288,
2313
]
]
] |
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms.fields import BooleanField, TextAreaField
from wtforms.fields.html5 import URLField
from wtforms.fields.simple import HiddenField, StringField
from wtforms.validators import DataRequired, Optional, ValidationError
from indico.core.db import db
from indico.core.db.sqlalchemy.protection import ProtectionMode
from indico.modules.attachments.models.folders import AttachmentFolder
from indico.modules.attachments.util import get_default_folder_names
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.forms.base import IndicoForm, generated_data
from indico.web.forms.fields import (AccessControlListField, EditableFileField, FileField, IndicoDateField,
IndicoRadioField, IndicoSelectMultipleCheckboxField)
from indico.web.forms.validators import HiddenUnless, UsedIf
from indico.web.forms.widgets import SwitchWidget, TypeaheadWidget
class AttachmentFormBase(IndicoForm):
protected = BooleanField(_("Protected"), widget=SwitchWidget())
folder = QuerySelectField(_("Folder"), allow_blank=True, blank_text=_("No folder selected"), get_label='title',
description=_("Adding materials to folders allow grouping and easier permission "
"management."))
acl = AccessControlListField(_("Access control list"), [UsedIf(lambda form, field: form.protected.data)],
allow_groups=True, allow_external_users=True, allow_event_roles=True,
allow_category_roles=True, allow_registration_forms=True,
event=lambda form: form.event,
default_text=_('Restrict access to this material'),
description=_("The list of users and groups allowed to access the material"))
def __init__(self, *args, **kwargs):
linked_object = kwargs.pop('linked_object')
self.event = getattr(linked_object, 'event', None) # not present in categories
super(AttachmentFormBase, self).__init__(*args, **kwargs)
self.folder.query = (AttachmentFolder
.find(object=linked_object, is_default=False, is_deleted=False)
.order_by(db.func.lower(AttachmentFolder.title)))
@generated_data
def protection_mode(self):
return ProtectionMode.protected if self.protected.data else ProtectionMode.inheriting
class EditAttachmentFormBase(AttachmentFormBase):
title = StringField(_("Title"), [DataRequired()])
description = TextAreaField(_("Description"))
class AddAttachmentFilesForm(AttachmentFormBase):
files = FileField(_("Files"), multiple_files=True)
def _get_file_data(attachment):
file = attachment.file
return {
'url': url_for('attachments.download', attachment, filename=file.filename, from_preview='1'),
'filename': file.filename,
'size': file.size,
'content_type': file.content_type
}
class EditAttachmentFileForm(EditAttachmentFormBase):
file = EditableFileField(_("File"), add_remove_links=False, get_metadata=_get_file_data,
description=_("Already uploaded file. Replace it by adding a new file."))
class AttachmentLinkFormMixin(object):
title = StringField(_("Title"), [DataRequired()])
link_url = URLField(_("URL"), [DataRequired()])
class AddAttachmentLinkForm(AttachmentLinkFormMixin, AttachmentFormBase):
pass
class EditAttachmentLinkForm(AttachmentLinkFormMixin, EditAttachmentFormBase):
pass
class AttachmentFolderForm(IndicoForm):
title = HiddenField(_("Name"), [DataRequired()], widget=TypeaheadWidget(),
description=_("The name of the folder."))
description = TextAreaField(_("Description"), description=_("Description of the folder and its content"))
protected = BooleanField(_("Protected"), widget=SwitchWidget())
acl = AccessControlListField(_("Access control list"), [UsedIf(lambda form, field: form.protected.data)],
allow_groups=True, allow_external_users=True, allow_event_roles=True,
allow_category_roles=True, allow_registration_forms=True,
event=lambda form: form.event,
default_text=_('Restrict access to this folder'),
description=_("The list of users and groups allowed to access the folder"))
is_always_visible = BooleanField(_("Always Visible"),
[HiddenUnless('is_hidden', value=False)],
widget=SwitchWidget(),
description=_("By default, folders are always visible, even if a user cannot "
"access them. You can disable this behavior here, hiding the folder "
"for anyone who does not have permission to access it."))
is_hidden = BooleanField(_("Always hidden"),
[HiddenUnless('is_always_visible', value=False)],
widget=SwitchWidget(),
description=_("Always hide the folder and its contents from public display areas of "
"the event. You can use this for folders to store non-image files used "
"e.g. in download links. The access permissions still apply."))
def __init__(self, *args, **kwargs):
self.linked_object = kwargs.pop('linked_object')
self.event = getattr(self.linked_object, 'event', None) # not present in categories
super(AttachmentFolderForm, self).__init__(*args, **kwargs)
self.title.choices = self._get_title_suggestions()
def _get_title_suggestions(self):
query = db.session.query(AttachmentFolder.title).filter_by(is_deleted=False, is_default=False,
object=self.linked_object)
existing = set(x[0] for x in query)
suggestions = set(get_default_folder_names()) - existing
if self.title.data:
suggestions.add(self.title.data)
return sorted(suggestions)
def validate_is_always_visible(self, field):
if self.is_always_visible.data and self.is_hidden.data:
raise ValidationError('These two options cannot be used at the same time')
validate_is_hidden = validate_is_always_visible
@generated_data
def protection_mode(self):
return ProtectionMode.protected if self.protected.data else ProtectionMode.inheriting
class AttachmentPackageForm(IndicoForm):
added_since = IndicoDateField(_('Added Since'), [Optional()],
description=_('Include only attachments uploaded after this date'))
filter_type = IndicoRadioField(_('Include'), [DataRequired()])
sessions = IndicoSelectMultipleCheckboxField(_('Sessions'),
[UsedIf(lambda form, _: form.filter_type.data == 'sessions'),
DataRequired()],
description=_('Include materials from selected sessions'),
coerce=int)
contributions = IndicoSelectMultipleCheckboxField(_('Contributions'),
[UsedIf(lambda form, _: form.filter_type.data == 'contributions'),
DataRequired()],
description=_('Include materials from selected contributions'),
coerce=int)
dates = IndicoSelectMultipleCheckboxField(_('Events scheduled on'),
[UsedIf(lambda form, _: form.filter_type.data == 'dates'),
DataRequired()],
description=_('Include materials from sessions/contributions scheduled '
'on the selected dates'))
| [
[
[
237,
253
]
],
[
[
297,
313
],
[
1357,
1373
]
],
[
[
341,
353
],
[
1292,
1304
],
[
4244,
4256
],
[
4880,
4892
],
[
5415,
5427
]
],
[
[
355,
368
],
[
2932,
2945
],
[
4136,
4149
]
],
[
[
402,
410
],
[
3719,
3727
]
],
[
[
445,
456
],
[
3985,
3996
]
],
[
[
458,
469
],
[
2872,
2883
],
[
3662,
3673
]
],
[
[
501,
513
],
[
2897,
2909
],
[
3687,
3699
],
[
3739,
3751
],
[
4009,
4021
],
[
7351,
7363
],
[
7594,
7606
],
[
8030,
8042
],
[
8455,
8467
]
],
[
[
515,
523
],
[
7185,
7193
]
],
[
[
525,
540
],
[
6821,
6836
]
],
[
[
569,
571
],
[
2622,
2624
],
[
6291,
6293
]
],
[
[
621,
635
],
[
2729,
2743
],
[
2782,
2796
],
[
7010,
7024
],
[
7063,
7077
]
],
[
[
690,
706
],
[
2473,
2489
],
[
2636,
2652
],
[
6308,
6324
]
],
[
[
751,
775
],
[
6542,
6566
]
],
[
[
805,
806
],
[
1305,
1306
],
[
1374,
1375
],
[
1416,
1417
],
[
1502,
1503
],
[
1665,
1666
],
[
2046,
2047
],
[
2130,
2131
],
[
2884,
2885
],
[
2946,
2947
],
[
3038,
3039
],
[
3442,
3443
],
[
3547,
3548
],
[
3674,
3675
],
[
3728,
3729
],
[
3997,
3998
],
[
4088,
4089
],
[
4150,
4151
],
[
4180,
4181
],
[
4257,
4258
],
[
4329,
4330
],
[
4710,
4711
],
[
4792,
4793
],
[
4893,
4894
],
[
5102,
5103
],
[
5428,
5429
],
[
5620,
5621
],
[
7166,
7167
],
[
7244,
7245
],
[
7336,
7337
],
[
7418,
7419
],
[
7672,
7673
],
[
7834,
7835
],
[
8113,
8114
],
[
8277,
8278
],
[
8530,
8531
]
],
[
[
841,
848
],
[
3160,
3167
]
],
[
[
883,
893
],
[
1263,
1273
],
[
3960,
3970
],
[
7119,
7129
]
],
[
[
895,
909
],
[
2668,
2682
],
[
6949,
6963
]
],
[
[
947,
969
],
[
1642,
1664
],
[
4306,
4328
]
],
[
[
971,
988
],
[
3424,
3441
]
],
[
[
990,
999
],
[
3028,
3037
]
],
[
[
1001,
1016
],
[
7150,
7165
]
],
[
[
1055,
1071
],
[
7319,
7335
]
],
[
[
1073,
1106
],
[
7384,
7417
],
[
7800,
7833
],
[
8243,
8276
]
],
[
[
1148,
1160
],
[
4952,
4964
],
[
5478,
5490
]
],
[
[
1162,
1168
],
[
1692,
1698
],
[
4356,
4362
],
[
7483,
7489
],
[
7909,
7915
],
[
8350,
8356
]
],
[
[
1206,
1218
],
[
1328,
1340
],
[
4280,
4292
],
[
5037,
5049
],
[
5563,
5575
]
],
[
[
1220,
1235
],
[
4033,
4048
]
],
[
[
1244,
1262
],
[
2839,
2857
],
[
2995,
3013
],
[
3811,
3829
],
[
2392,
2410
]
],
[
[
2816,
2838
],
[
3388,
3410
],
[
3897,
3919
]
],
[
[
2972,
2994
]
],
[
[
3077,
3091
],
[
3490,
3504
]
],
[
[
3365,
3387
]
],
[
[
3617,
3640
],
[
3786,
3809
],
[
3872,
3895
]
],
[
[
3764,
3785
]
],
[
[
3849,
3871
]
],
[
[
3939,
3959
],
[
6123,
6143
]
],
[
[
7097,
7118
]
]
] |
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 6 20:55:32 2020
@author: arosso
"""
from recipe_scrapers import scrape_me
# give the url as a string, it can be url from any site listed below
# scraper = scrape_me('http://allrecipes.com/Recipe/Apple-Cake-Iv/Detail.aspx')
scraper = scrape_me('https://www.101cookbooks.com/instant-pot-mushroom-stroganoff/')
dict_recipe = dict()
dict_recipe['title'] = scraper.title()
dict_recipe['total_time'] = scraper.total_time()
dict_recipe['yields'] = scraper.yields()
dict_recipe['ingredients'] = scraper.ingredients()
dict_recipe['instructions'] = scraper.instructions()
#dict_recipe['image'] = scraper.image()
#dict_recipe['links'] = scraper.links()
print(dict_recipe)
| [
[
[
114,
123
],
[
284,
293
]
],
[
[
274,
281
],
[
411,
418
],
[
457,
464
],
[
508,
515
],
[
555,
562
],
[
607,
614
]
],
[
[
360,
371
],
[
381,
392
],
[
427,
438
],
[
478,
489
],
[
525,
536
],
[
577,
588
],
[
731,
742
]
]
] |
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans
import itertools
import findspark
import pyspark
from pyspark.sql.functions import pandas_udf, PandasUDFType
from pyspark.sql.types import *
import time
def simulate_sbm_dc_data(sbm_matrix, sample_size=1000, partition_num=10, cluster_num=3):
"""
:param sbm_matrix:
:param sample_size:
:param partition_num:
:param cluster_num:
:return:
"""
if (sbm_matrix.shape[0] != cluster_num) | \
(sbm_matrix.shape[1] != cluster_num) | \
(sbm_matrix.shape[0] != sbm_matrix.shape[1]):
raise Exception("sbm_matrix shape Error or the Shape is not equal to Cluster_num")
else:
data_index = [x for x in range(sample_size)]
data_cluster = np.random.randint(0, cluster_num, sample_size).tolist()
index_cluster = dict(zip(data_index, data_cluster))
X = np.empty(shape=[0, 3], dtype=int)
X = np.append(X, [[0, -1, np.random.randint(0, partition_num, 1)[0]]], axis=0)
for i in range(1, sample_size):
p_num = np.random.randint(0, partition_num, 1)[0]
X = np.append(X, [[i, -1, p_num]], axis=0) # to avoid node lost
for j in range(i):
if np.random.binomial(1, sbm_matrix[index_cluster[i], index_cluster[j]], 1):
X = np.append(X, [[i, j, p_num]], axis=0)
data_pdf = pd.DataFrame(X, columns=["IndexNum1", "IndexNum2", "PartitionID"])
return data_pdf, index_cluster
def get_laplace_matrix(adjacency_matrix, position="master", regularization=False):
"""
:param adjacency_matrix: adjacency matrix (square or rectangular)
:param position: "master" or "worker"
:param regularization: whether to apply regularization
:return: the Laplacian matrix
"""
if regularization:
if position == "master":
degree = np.sum(adjacency_matrix, axis=1)
d = np.diag((degree + np.mean(degree)) ** (-0.5)) # build the regularised degree matrix
return np.dot(np.dot(d, adjacency_matrix), d)
elif position == "worker":
# 2020.7.18 for test
out_degree = np.sum(adjacency_matrix, axis=1)
out_degree_matrix = np.diag((out_degree + np.mean(out_degree)) ** (-0.5))
for i in range(out_degree_matrix.shape[0]):
if out_degree_matrix[i, i] == np.infty:
out_degree_matrix[i, i] = 1000
in_degree = np.sum(adjacency_matrix, axis=0)
in_degree_matrix = np.diag((in_degree + np.mean(in_degree)) ** (-0.5))
###
laplace_matrix = np.dot(np.dot(out_degree_matrix, adjacency_matrix), in_degree_matrix)
return laplace_matrix
# D = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5))
# F = np.diag(np.sum(adjacency_matrix, axis=0) ** (-0.5))
# return np.dot(np.dot(D, adjacency_matrix), F) # build the degree matrices
else:
raise Exception("Input Error: worker or master is expected but {} are given".format(position))
else:
if position == "master":
d = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5)) # build the degree matrix
return np.dot(np.dot(d, adjacency_matrix), d)
elif position == "worker":
out_degree_matrix = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5))
for i in range(out_degree_matrix.shape[0]):
if out_degree_matrix[i, i] == np.infty:
out_degree_matrix[i, i] = 10000
in_degree_matrix = np.diag(np.sum(adjacency_matrix, axis=0) ** (-0.5))
laplace_matrix = np.dot(np.dot(out_degree_matrix, adjacency_matrix), in_degree_matrix)
return laplace_matrix
# D = np.diag(np.sum(adjacency_matrix, axis=1) ** (-0.5))
# F = np.diag(np.sum(adjacency_matrix, axis=0) ** (-0.5))
# return np.dot(np.dot(D, adjacency_matrix), F) # build the degree matrices
else:
raise Exception("Input Error: worker or master is expected but {} are given".format(position))
def get_spectral(laplace_matrix, k, normalization=False, method='svd'):
"""
:param laplace_matrix: the Laplacian matrix
:param k: number of leading vectors to keep after the SVD
:param normalization: whether to row-normalise the spectral embedding
:param method: use singular value decomposition ('svd') or eigenvalue decomposition ('evd')
:return: the resulting spectral embedding
"""
if method == 'svd':
u, _, _ = np.linalg.svd(laplace_matrix)
spectral = u[:, list(range(k))]
if normalization:
row_len = len(u) # number of rows
for i in range(row_len):
norm2 = np.linalg.norm(spectral[i])
if norm2:
spectral[i] = spectral[i] / np.linalg.norm(spectral[i])
elif method == 'evd':
e_vals, e_vecs = np.linalg.eig(laplace_matrix)
sorted_indices = np.argsort(e_vals)
spectral = e_vecs[:, sorted_indices[:-k-1:-1]]
if normalization:
row_len = len(e_vecs) # number of rows
for i in range(row_len):
norm2 = np.linalg.norm(spectral[i])
if norm2:
spectral[i] = spectral[i] / np.linalg.norm(spectral[i])
else:
raise ValueError("method must be 'svd' or 'evd' but {} is given".format(method))
return spectral
def worker_clustering(worker_df, cluster_num):
"""
:param worker_df:
:param cluster_num:
:return:
"""
node_list = list(set(worker_df["IndexNum1"].tolist()))
node_num = len(node_list)
index_list = [x for x in range(node_num)]
node2index = dict(zip(node_list, index_list))
adj_matrix = np.zeros((node_num, node_num), dtype=int)
for i in range(node_num):
adj_matrix[i][i] = 10
for row in worker_df.itertuples(index=False, name='Pandas'):
item1 = getattr(row, "IndexNum1")
item2 = getattr(row, "IndexNum2")
if (item2 in node_list) & (item2 != -1):
adj_matrix[node2index[item1]][node2index[item2]] = 1
adj_matrix[node2index[item2]][node2index[item1]] = 1
# first, get the laplace matrix
laplace_matrix = get_laplace_matrix(adj_matrix,
position='master',
regularization=False)
# second, get the spectral
spectral = get_spectral(laplace_matrix, cluster_num, normalization=False, method='svd')
# third, do k-means in spectral
model = KMeans(n_clusters=cluster_num)
model_fit = model.fit(spectral) # do k-means on the spectral embedding
# cluster_center = model_fit.cluster_centers_ # center points
cluster_label = list(model_fit.labels_) # labels (cluster information)
# return
worker_num = worker_df["PartitionID"].tolist()[0]
out_df = pd.DataFrame({"PartitionID": [worker_num for _ in range(len(node_list))],
"IndexNum": node_list,
"ClusterExp": cluster_label})
return out_df
def get_accurate(clustering_res_df, cluster_number, error=False):
"""
:param clustering_res_df: a pandas DataFrame about clustering result
:param cluster_number: the number of the cluster
(the first column is the index,
the second column is the right information,
the third column is the clustering information)
:param error: if error=True, then return the error rate, else, return the accuracy rate
:return: the clustering accuracy
"""
if clustering_res_df.shape[1] != 3:
raise Exception("Shape Error: the input DataFrame's column number is not 3")
real_dict = {}
clustering_dict = {}
for i in range(cluster_number):
real_df = clustering_res_df.loc[clustering_res_df['ClusterInfo'] == i]
clustering_df = clustering_res_df.loc[clustering_res_df['ClusterExp'] == i]
real_dict[i] = real_df['IndexNum'].tolist()
clustering_dict[i] = clustering_df['IndexNum'].tolist()
accuracy_matrix = np.zeros((cluster_number, cluster_number))
for i in range(cluster_number):
for j in range(cluster_number):
accuracy_matrix[i][j] = len(set(real_dict[i]).intersection(set(clustering_dict[j])))
# for test
# print("The accuracy matrix is: \n", accuracy_matrix)
case_iterator = itertools.permutations(range(cluster_number), cluster_number)
accurate = 0
for item in case_iterator:
acc = sum([accuracy_matrix[i][item[i]] for i in range(cluster_number)])
if acc > accurate:
accurate = acc
if not error:
return accurate / clustering_res_df.shape[0]
else:
return 1 - accurate / clustering_res_df.shape[0]
# TODO some SBM matrix
sbm_matrix1 = np.array([[0.7, 0.45, 0.45],
[0.45, 0.7, 0.45],
[0.45, 0.45, 0.7]])
sbm_matrix2 = np.array([[0.8, 0.4, 0.4],
[0.4, 0.8, 0.4],
[0.4, 0.4, 0.8]])
sbm_matrix3 = np.array([[0.6, 0.45, 0.45],
[0.45, 0.6, 0.45],
[0.45, 0.45, 0.6]])
sbm_matrix4 = np.array([[0.2, 0.1, 0.1],
[0.1, 0.2, 0.1],
[0.1, 0.1, 0.2]])
if __name__ == '__main__':
# Model Settings
sbm_matrix = sbm_matrix4
sample_size = 1000
master_num = 100
worker_per_sub = 20
partition_num = 50
cluster_num = 3
a, b = simulate_sbm_dc_data(sbm_matrix)
c = worker_clustering(a, 3)
real_label = []
for row in c.itertuples(index=False, name='Pandas'):
item = getattr(row, "IndexNum")
real_label.append(b[item])
c["ClusterInfo"] = real_label
print(get_accurate(c, 3))
print(c)
# print(a)
| [
[
[
7,
18
],
[
8576,
8578
],
[
8706,
8708
],
[
8830,
8832
],
[
8960,
8962
],
[
780,
782
],
[
908,
910
],
[
954,
956
],
[
976,
978
],
[
1089,
1091
],
[
1147,
1149
],
[
1258,
1260
],
[
1356,
1358
],
[
1835,
1837
],
[
1884,
1886
],
[
1902,
1904
],
[
1958,
1960
],
[
1965,
1967
],
[
2092,
2094
],
[
2157,
2159
],
[
2179,
2181
],
[
2313,
2315
],
[
2398,
2400
],
[
2462,
2464
],
[
2483,
2485
],
[
2559,
2561
],
[
2566,
2568
],
[
3055,
3057
],
[
3063,
3065
],
[
3135,
3137
],
[
3142,
3144
],
[
3242,
3244
],
[
3250,
3252
],
[
3396,
3398
],
[
3489,
3491
],
[
3497,
3499
],
[
3570,
3572
],
[
3577,
3579
],
[
4295,
4297
],
[
4487,
4489
],
[
4589,
4591
],
[
4668,
4670
],
[
4723,
4725
],
[
4924,
4926
],
[
5026,
5028
],
[
5519,
5521
],
[
7839,
7841
]
],
[
[
26,
38
],
[
1413,
1415
],
[
6658,
6660
]
],
[
[
67,
73
],
[
6332,
6338
]
],
[
[
81,
90
],
[
8149,
8158
]
],
[
[
98,
107
]
],
[
[
115,
122
]
],
[
[
157,
167
]
],
[
[
169,
182
]
],
[
[
213,
214
]
],
[
[
222,
226
]
],
[
[
233,
253
],
[
9272,
9292
]
],
[
[
1531,
1549
],
[
6007,
6025
]
],
[
[
4013,
4025
],
[
6206,
6218
]
],
[
[
5180,
5197
],
[
9313,
9330
]
],
[
[
6863,
6875
],
[
9533,
9545
]
],
[
[
8562,
8573
]
],
[
[
8692,
8703
]
],
[
[
8816,
8827
]
],
[
[
8946,
8957
],
[
9138,
9149
]
],
[
[
9125,
9135
],
[
9293,
9303
]
],
[
[
9154,
9165
]
],
[
[
9177,
9187
]
],
[
[
9198,
9212
]
],
[
[
9222,
9235
]
],
[
[
9245,
9256
]
],
[
[
9265,
9266
],
[
9331,
9332
]
],
[
[
9268,
9269
],
[
9480,
9481
]
],
[
[
9309,
9310
],
[
9372,
9373
],
[
9493,
9494
],
[
9546,
9547
],
[
9563,
9564
]
],
[
[
9341,
9351
],
[
9462,
9472
],
[
9512,
9522
]
],
[
[
9365,
9368
],
[
9437,
9440
]
],
[
[
9422,
9426
],
[
9482,
9486
]
]
] |
from __future__ import absolute_import
import six
from string import Formatter
class dontexplodedict(object):
"""
A dictionary that won't throw a KeyError and will
return back a sensible default value to be used in
string formatting.
"""
def __init__(self, d=None):
self.data = d or {}
def __getitem__(self, key):
return self.data.get(key, '')
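# A minimal usage sketch (values assumed for illustration):
# Formatter().vformat(u'hello {name}', [], dontexplodedict())  # -> u'hello '
# missing keys render as '' instead of raising KeyError.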
class EventError(object):
INVALID_DATA = 'invalid_data'
INVALID_ATTRIBUTE = 'invalid_attribute'
VALUE_TOO_LONG = 'value_too_long'
UNKNOWN_ERROR = 'unknown_error'
SECURITY_VIOLATION = 'security_violation'
RESTRICTED_IP = 'restricted_ip'
JS_GENERIC_FETCH_ERROR = 'js_generic_fetch_error' # deprecated in favor of FETCH_GENERIC_ERROR
FETCH_GENERIC_ERROR = 'fetch_generic_error'
JS_INVALID_HTTP_CODE = 'js_invalid_http_code' # deprecated in favor of FETCH_INVALID_HTTP_CODE
FETCH_INVALID_HTTP_CODE = 'fetch_invalid_http_code'
JS_INVALID_CONTENT = 'js_invalid_content'
JS_NO_COLUMN = 'js_no_column'
JS_MISSING_SOURCE = 'js_no_source'
JS_INVALID_SOURCEMAP = 'js_invalid_source'
JS_TOO_MANY_REMOTE_SOURCES = 'js_too_many_sources'
JS_INVALID_SOURCE_ENCODING = 'js_invalid_source_encoding'
FETCH_INVALID_ENCODING = 'fetch_invalid_source_encoding'
JS_INVALID_SOURCEMAP_LOCATION = 'js_invalid_sourcemap_location'
JS_TOO_LARGE = 'js_too_large' # deprecated in favor of FETCH_TOO_LARGE
FETCH_TOO_LARGE = 'fetch_too_large'
JS_FETCH_TIMEOUT = 'js_fetch_timeout' # deprecated in favor of FETCH_TIMEOUT
FETCH_TIMEOUT = 'fetch_timeout'
NATIVE_NO_CRASHED_THREAD = 'native_no_crashed_thread'
NATIVE_INTERNAL_FAILURE = 'native_internal_failure'
NATIVE_NO_SYMSYND = 'native_no_symsynd'
NATIVE_BAD_DSYM = 'native_bad_dsym'
NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM = 'native_optionally_bundled_dsym'
NATIVE_MISSING_DSYM = 'native_missing_dsym'
NATIVE_MISSING_SYSTEM_DSYM = 'native_missing_system_dsym'
NATIVE_MISSING_SYMBOL = 'native_missing_symbol'
NATIVE_SIMULATOR_FRAME = 'native_simulator_frame'
NATIVE_UNKNOWN_IMAGE = 'native_unknown_image'
PROGUARD_MISSING_MAPPING = 'proguard_missing_mapping'
PROGUARD_MISSING_LINENO = 'proguard_missing_lineno'
_messages = {
INVALID_DATA: u'Discarded invalid value for parameter \'{name}\'',
INVALID_ATTRIBUTE: u'Discarded invalid parameter \'{name}\'',
VALUE_TOO_LONG: u'Discarded value for \'{name}\' due to exceeding maximum length',
UNKNOWN_ERROR: u'Unknown error',
SECURITY_VIOLATION: u'Cannot fetch resource due to security violation on {url}',
RESTRICTED_IP: u'Cannot fetch resource due to restricted IP address on {url}',
# deprecated in favor of FETCH_GENERIC_ERROR
JS_GENERIC_FETCH_ERROR: u'Unable to fetch resource: {url}',
FETCH_GENERIC_ERROR: u'Unable to fetch resource: {url}',
JS_INVALID_HTTP_CODE: u'HTTP returned {value} response on {url}',
# deprecated in favor of FETCH_INVALID_HTTP_CODE
FETCH_INVALID_HTTP_CODE: u'HTTP returned {value} response on {url}',
JS_INVALID_CONTENT: u'Source file was not JavaScript: {url}',
JS_NO_COLUMN: u'Cannot expand sourcemap due to no column information for {url}',
JS_MISSING_SOURCE: u'Source code was not found for {url}',
JS_INVALID_SOURCEMAP: u'Sourcemap was invalid or not parseable: {url}',
JS_TOO_MANY_REMOTE_SOURCES: u'The maximum number of remote source requests was made',
JS_INVALID_SOURCE_ENCODING: u'Source file was not \'{value}\' encoding: {url}',
FETCH_INVALID_ENCODING: u'Source file was not \'{value}\' encoding: {url}',
JS_INVALID_SOURCEMAP_LOCATION: u'Invalid location in sourcemap: ({column}, {row})',
# deprecated in favor of FETCH_TOO_LARGE
JS_TOO_LARGE: u'Remote file too large: ({max_size:g}MB, {url})',
FETCH_TOO_LARGE: u'Remote file too large: ({max_size:g}MB, {url})',
# deprecated in favor of FETCH_TIMEOUT
JS_FETCH_TIMEOUT: u'Remote file took too long to load: ({timeout}s, {url})',
FETCH_TIMEOUT: u'Remote file took too long to load: ({timeout}s, {url})',
NATIVE_NO_CRASHED_THREAD: u'No crashed thread found in crash report',
NATIVE_INTERNAL_FAILURE: u'Internal failure when attempting to symbolicate: {error}',
NATIVE_NO_SYMSYND: u'The symbolizer is not configured for this system.',
NATIVE_BAD_DSYM: u'The debug symbol file used was broken.',
NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM: u'An optional debug symbol file was missing.',
NATIVE_MISSING_DSYM: u'A required debug symbol file was missing.',
NATIVE_MISSING_SYSTEM_DSYM: u'A system debug symbol file was missing.',
NATIVE_MISSING_SYMBOL: u'Unable to resolve a symbol.',
NATIVE_SIMULATOR_FRAME: u'Encountered an unprocessable simulator frame.',
NATIVE_UNKNOWN_IMAGE: u'A binary image is referenced that is unknown.',
PROGUARD_MISSING_MAPPING: u'A proguard mapping file was missing.',
PROGUARD_MISSING_LINENO: u'A proguard mapping file does not contain line info.',
}
@classmethod
def get_message(cls, data):
return Formatter().vformat(
cls._messages[data['type']],
[],
dontexplodedict(data),
)
def to_dict(self):
return {k: v for k, v in six.iteritems(self) if k != 'type'}
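# Example (a sketch; the data dict shape is inferred from get_message's usage):
# EventError.get_message({'type': EventError.INVALID_DATA, 'name': 'foo'})
# # -> u"Discarded invalid value for parameter 'foo'"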
| [
[
[
23,
38
]
],
[
[
47,
50
],
[
5416,
5419
]
],
[
[
70,
79
],
[
5236,
5245
]
],
[
[
88,
103
],
[
5326,
5341
]
],
[
[
401,
411
]
]
] |
import os.path as pt
import numpy as np
import torchvision.transforms as transforms
import torch
from torch.utils.data import DataLoader
from torchvision.datasets import EMNIST
def ceil(x: float):
return int(np.ceil(x))
class MyEMNIST(EMNIST):
""" Reimplements get_item to transform tensor input to pil image before applying transformation. """
def __getitem__(self, index):
img, target = self.data[index], self.targets[index]
# doing this so that it is consistent with all other datasets
# to return a PIL Image
img = transforms.ToPILImage()(img)
if self.target_transform is not None:
target = self.target_transform(target)
if self.transform is not None:
img = self.transform(img)
return img, target
class OEEMNIST(EMNIST):
def __init__(self, size: torch.Size, root: str = None, split='letters', limit_var=20): # split = Train
"""
Outlier Exposure dataset for EMNIST.
:param size: size of the samples in n x c x h x w, samples will be resized to h x w. If n is larger than the
number of samples available in EMNIST, dataset will be enlarged by repetitions to fit n.
This is important as exactly n images are extracted per iteration of the data_loader.
For online supervision n should be set to 1 because only one sample is extracted at a time.
:param root: root directory where data is found or is to be downloaded to.
:param split: The dataset has 6 different splits: ``byclass``, ``bymerge``,
``balanced``, ``letters``, ``digits`` and ``mnist``. This argument specifies
which one to use.
:param limit_var: limits the number of different samples, i.e. randomly chooses limit_var many samples
from all available ones to be the training data.
"""
assert len(size) == 3 and size[1] == size[2]
root = pt.join(root, 'emnist', )
transform = transforms.Compose([
transforms.Resize((size[1], size[2])),
transforms.ToTensor()
])
super().__init__(root, split, transform=transform, download=True)
self.size = size
self.data = self.data.transpose(1, 2)
self.idx_to_class = {v: k for k, v in self.class_to_idx.items()}
if limit_var is not None and limit_var < len(self):
picks = np.random.choice(np.arange(self.data.size(0)), size=limit_var, replace=False)
self.data = self.data[picks]
self.targets = self.targets[picks]
if limit_var is not None and limit_var > len(self):
print(
'OEEMNIST shall be limited to {} samples, but EMNIST contains only {} samples, thus using all.'
.format(limit_var, len(self))
)
if len(self) < size[0]:
rep = ceil(size[0] / len(self))
old = len(self)
self.data = self.data.repeat(rep, 1, 1)
self.targets = self.targets.repeat(rep)
if rep != size[0] / old:
import warnings
warnings.warn(
'OEEMNIST has been limited to {} samples. '
'Due to the requested size of {}, the dataset will be enlarged. '
'But {} repetitions will make some samples appear more often than others in the dataset, '
'because the final size after repetitions is {}, which is cut to {}'
.format(limit_var, size[0], rep, len(self), size[0])
)
def data_loader(self):
return DataLoader(dataset=self, batch_size=self.size[0], shuffle=True, num_workers=0)
def __getitem__(self, index):
sample, target = super().__getitem__(index)
sample = sample.squeeze().mul(255).byte()
return sample
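# A minimal usage sketch (the root path is hypothetical):
# oe = OEEMNIST(torch.Size((1, 28, 28)), root='/tmp/data', limit_var=20)
# batch = next(iter(oe.data_loader()))  # one batch of byte images in [0, 255]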
| [
[
[
7,
20
],
[
1951,
1953
]
],
[
[
29,
40
],
[
215,
217
],
[
2412,
2414
],
[
2429,
2431
]
],
[
[
48,
84
],
[
569,
579
],
[
1997,
2007
],
[
2030,
2040
],
[
2081,
2091
]
],
[
[
92,
97
],
[
857,
862
]
],
[
[
127,
137
],
[
3624,
3634
]
],
[
[
171,
177
],
[
244,
250
],
[
819,
825
]
],
[
[
184,
188
],
[
2881,
2885
]
],
[
[
235,
243
]
],
[
[
810,
818
]
]
] |
import numpy as np
import torch
import torch.nn as nn
from rgb_stacking.utils.utils import init
class Flatten(nn.Module):
def forward(self, x):
return x.view(x.size(0), -1)
class Sum(nn.Module):
def __init__(self, dim):
super().__init__()
self.dim = dim
def forward(self, x):
return torch.sum(x, self.dim)
class Mean(nn.Module):
def __init__(self, dim):
super().__init__()
self.dim = dim
def forward(self, x):
return torch.mean(x, self.dim)
def init_rec(rec):
for name, param in rec.named_parameters():
if 'bias' in name:
nn.init.constant_(param, 0)
elif 'weight' in name:
nn.init.orthogonal_(param)
return rec
def init_(m):
return init(m, nn.init.orthogonal_, lambda x: nn.init.
constant_(x, 0), np.sqrt(2))
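# Example usage (a sketch; the Linear shape is illustrative, and `init` is
# assumed to apply the weight/bias initialisers and return the module):
# layer = init_(nn.Linear(64, 32))  # orthogonal weights, zero biases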
| [
[
[
7,
18
],
[
853,
855
]
],
[
[
26,
31
],
[
332,
337
],
[
501,
506
]
],
[
[
39,
53
],
[
112,
114
],
[
199,
201
],
[
368,
370
],
[
632,
634
],
[
703,
705
],
[
780,
782
],
[
811,
813
]
],
[
[
91,
95
],
[
772,
776
]
],
[
[
104,
111
]
],
[
[
195,
198
]
],
[
[
363,
367
]
],
[
[
531,
539
]
],
[
[
751,
756
]
]
] |
#!/usr/bin/env python
# coding=utf-8
# Stan 2018-08-04
import sys
if sys.version_info >= (3,):
class aStr():
def __str__(self):
return self.__unicode__()
def cmp(a, b):
return (a > b) - (a < b)
# range = range
def b(s):
return s.encode('utf-8')
def u(s):
return s.decode('utf-8')
# bytes = bytes
unicode = str
string_types = str,
numeric_types = int, float, complex
simple_types = int, float, complex, str, bytearray
collections_types = list, tuple, set, frozenset
all_types = (int, float, complex, str, bytearray,
list, tuple, set, frozenset, dict)
else:
class aStr():
def __str__(self):
return self.__unicode__().encode('utf-8')
# cmp = cmp
range = xrange
def b(s):
return s
def u(s):
return s
bytes = str
# unicode = unicode
string_types = basestring,
numeric_types = int, long, float, complex
simple_types = int, long, float, complex, basestring, bytearray
collections_types = list, tuple, set, frozenset
all_types = (int, long, float, complex, basestring, bytearray,
list, tuple, set, frozenset, dict)
| [
[
[
64,
67
],
[
73,
76
]
],
[
[
109,
113
]
],
[
[
191,
194
]
],
[
[
263,
264
]
],
[
[
311,
312
]
],
[
[
373,
380
]
],
[
[
392,
404
]
],
[
[
416,
429
]
],
[
[
456,
468
]
],
[
[
511,
528
]
],
[
[
563,
572
]
],
[
[
682,
686
]
],
[
[
791,
796
]
],
[
[
815,
816
]
],
[
[
847,
848
]
],
[
[
875,
880
]
],
[
[
914,
926
]
],
[
[
945,
958
]
],
[
[
991,
1003
]
],
[
[
1059,
1076
]
],
[
[
1111,
1120
]
]
] |
# A four-digit integer is given. Find the sum of even digits in it.
# Create a variable "var_int" and assign it a four-digit integer value.
# Create a variable "sum_even" and assign it 0.
# Find the sum of the even digits in the variable "var_int".
var_int = 1184
sum_even = 0
x1 = var_int % 10
var_int //= 10
sum_even += (x1 + 1) % 2 * x1
x2 = var_int % 10
var_int //= 10
sum_even += (x2 + 1) % 2 * x2
x3 = var_int % 10
var_int //= 10
sum_even += (x3 + 1) % 2 * x3
x4 = var_int % 10
var_int //= 10
sum_even += (x4 + 1) % 2 * x4
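# Equivalent loop-based sketch (commented out so the exercise's step-by-step
# variables remain the canonical solution; `n` is a fresh copy of the input):
# n = 1184
# assert sum(d for d in (n // 10 ** i % 10 for i in range(4)) if d % 2 == 0) == sum_even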
print(sum_even) | [
[
[
253,
260
],
[
286,
293
],
[
299,
306
]
],
[
[
268,
276
],
[
314,
322
]
],
[
[
281,
283
],
[
327,
329
],
[
341,
343
]
],
[
[
345,
347
],
[
391,
393
],
[
405,
407
]
],
[
[
409,
411
],
[
455,
457
],
[
469,
471
]
],
[
[
473,
475
],
[
519,
521
],
[
533,
535
]
]
] |
# Permafrost Forms
from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.forms import ModelForm
from django.forms.fields import CharField, ChoiceField, BooleanField
from django.forms.models import ModelMultipleChoiceField
from django.forms.widgets import CheckboxInput
from django.utils.translation import ugettext_lazy as _
from .models import PermafrostRole, get_optional_by_category, get_choices
CHOICES = [('', _("Choose Role Type"))] + get_choices()
LABELS = {
'name': _('Role Name'),
'category': _('Role Type')
}
def assemble_optiongroups_for_widget(permissions):
choices = []
optgroups = {}
if permissions:
for perm in permissions:
if perm.content_type.name in optgroups:
optgroups[perm.content_type.name].append((perm.pk, perm.name,))
else:
optgroups[perm.content_type.name] = [(perm.pk, perm.name,)]
for model_name, options in optgroups.items():
choices.append([model_name, options])
return choices
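# Illustrative return shape (hypothetical permissions):
# [['log entry', [(1, 'Can add log entry'), (2, 'Can change log entry')]],
#  ['site', [(7, 'Can add site')]]]
# i.e. one [content-type name, [(pk, name), ...]] group per model, ready to be
# used as grouped choices in a select widget.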
def bootstrappify(fields):
for field in fields:
widget = fields[field].widget
if not isinstance(widget, CheckboxInput):
if 'class' in widget.attrs:
widget.attrs['class'] = widget.attrs['class'] + " form-control"
else:
widget.attrs.update({'class':'form-control'})
class SelectPermafrostRoleTypeForm(ModelForm):
name = CharField(required=False)
description = CharField(required=False)
category = ChoiceField(choices=CHOICES)
class Meta:
model = PermafrostRole
fields = ('name', 'description', 'category',)
labels = LABELS
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
bootstrappify(self.fields)
class PermafrostRoleCreateForm(ModelForm):
permissions = ModelMultipleChoiceField(queryset=Permission.objects.all(), required=False)
class Meta:
model = PermafrostRole
fields = ('name', 'description', 'category', 'permissions')
labels = LABELS
def __init__(self, *args, **kwargs):
self.site = kwargs.pop('site', Site.objects.get_current())
super().__init__(*args, **kwargs)
self.fields['category'].choices = CHOICES
category = self.initial.get(
'category',
self.data.get('category', None)
)
if self.instance:
category = self.instance.category if self.instance.category else category
if category:
all_optional_permissions = get_optional_by_category(category=category)
ids = [perm.pk for perm in all_optional_permissions]
self.fields['permissions'].queryset = Permission.objects.filter(id__in=ids)
bootstrappify(self.fields)
def save(self, commit=True):
self.instance.site = self.site
instance = super().save(commit)
category = instance.category
if 'permissions' in self.cleaned_data:
perm_ids = []
if category:
perm_ids = self.cleaned_data['permissions']
if perm_ids:
instance.permissions_set(Permission.objects.filter(id__in=perm_ids))
else:
instance.permissions_clear()
return instance
def clean_name(self):
name = self.cleaned_data['name']
name_exists = False
if self.instance: ## on update check if name change exists
if 'name' in self.changed_data:
name_exists = PermafrostRole.objects.filter(
name=name,
site=self.site,
).exclude(pk=self.instance.pk).first()
else:
try:
name_exists = PermafrostRole.objects.get(
name=name,
site=self.site
)
except PermafrostRole.DoesNotExist:
pass
if name_exists:
raise ValidationError('Role with this name already exists')
# Always return field
return name
class PermafrostRoleUpdateForm(PermafrostRoleCreateForm):
"""
Form used to display role detail
Only allowed to edit optional permissions, name and description
Category and required permissions stay locked
"""
deleted = BooleanField(required=False)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['category'].widget.attrs.update({'readonly': True, 'disabled': True})
self.fields['category'].disabled = True
self.fields['category'].required = False
self.fields['category'].choices = [choice for choice in CHOICES if choice[0] == self.instance.category]
self.fields['category'].initial = self.instance.category
## limit choices to saved category
self.fields['deleted'].initial = self.instance.deleted
def save(self, commit=True):
if self.cleaned_data['deleted']:
self.instance.deleted = self.cleaned_data['deleted']
instance = super().save(commit)
return instance
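# A minimal view-level sketch for PermafrostRoleCreateForm above (illustrative
# only: the URL name and template path are assumptions, and a real project
# would wire this through its own views and urlconf).
from django.shortcuts import redirect, render

def create_role(request):
    form = PermafrostRoleCreateForm(
        request.POST or None,
        site=Site.objects.get_current(),  # optional: __init__ pops 'site' and defaults to the current Site
    )
    if request.method == 'POST' and form.is_valid():
        role = form.save()  # save() also syncs the optional permissions
        return redirect('permafrost-role-detail', pk=role.pk)  # hypothetical URL name
    return render(request, 'permafrost/role_form.html', {'form': form})  # hypothetical template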
| [
[
[
43,
51
]
],
[
[
91,
101
],
[
2021,
2031
],
[
2886,
2896
],
[
3351,
3361
]
],
[
[
142,
146
],
[
2283,
2287
]
],
[
[
182,
197
],
[
4193,
4208
]
],
[
[
223,
232
],
[
1532,
1541
],
[
1957,
1966
]
],
[
[
265,
274
],
[
1555,
1564
],
[
1599,
1608
]
],
[
[
276,
287
],
[
1640,
1651
]
],
[
[
289,
301
],
[
4545,
4557
]
],
[
[
334,
358
],
[
1987,
2011
]
],
[
[
392,
405
],
[
1279,
1292
]
],
[
[
443,
461
],
[
553,
554
],
[
617,
618
],
[
649,
650
]
],
[
[
482,
496
],
[
1706,
1720
],
[
2095,
2109
],
[
3729,
3743
],
[
3960,
3974
],
[
4092,
4106
]
],
[
[
498,
522
],
[
2718,
2742
]
],
[
[
524,
535
],
[
579,
590
]
],
[
[
537,
544
],
[
1660,
1667
],
[
2395,
2402
],
[
4922,
4929
]
],
[
[
594,
600
],
[
1792,
1798
],
[
2195,
2201
]
],
[
[
671,
703
]
],
[
[
1159,
1172
],
[
1891,
1904
],
[
2933,
2946
]
],
[
[
1503,
1531
]
],
[
[
1932,
1956
],
[
4330,
4354
]
],
[
[
4305,
4329
]
]
] |
import os
import numpy as np
import pandas as pd
from qlib.data.dataset.processor import Processor
from qlib.data.dataset.utils import fetch_df_by_index
from typing import Dict
class HighFreqTrans(Processor):
def __init__(self, dtype: str = "bool"):
self.dtype = dtype
def fit(self, df_features):
pass
def __call__(self, df_features):
if self.dtype == "bool":
return df_features.astype(np.int8)
else:
return df_features.astype(np.float32)
class HighFreqNorm(Processor):
def __init__(
self,
fit_start_time: pd.Timestamp,
fit_end_time: pd.Timestamp,
feature_save_dir: str,
norm_groups: Dict[str, int],
):
self.fit_start_time = fit_start_time
self.fit_end_time = fit_end_time
self.feature_save_dir = feature_save_dir
self.norm_groups = norm_groups
def fit(self, df_features) -> None:
if os.path.exists(self.feature_save_dir) and len(os.listdir(self.feature_save_dir)) != 0:
return
os.makedirs(self.feature_save_dir)
fetch_df = fetch_df_by_index(df_features, slice(self.fit_start_time, self.fit_end_time), level="datetime")
del df_features
index = 0
names = {}
for name, dim in self.norm_groups.items():
names[name] = slice(index, index + dim)
index += dim
for name, name_val in names.items():
df_values = fetch_df.iloc(axis=1)[name_val].values
if name.endswith("volume"):
df_values = np.log1p(df_values)
self.feature_mean = np.nanmean(df_values)
np.save(self.feature_save_dir + name + "_mean.npy", self.feature_mean)
df_values = df_values - self.feature_mean
self.feature_std = np.nanstd(np.absolute(df_values))
np.save(self.feature_save_dir + name + "_std.npy", self.feature_std)
df_values = df_values / self.feature_std
np.save(self.feature_save_dir + name + "_vmax.npy", np.nanmax(df_values))
np.save(self.feature_save_dir + name + "_vmin.npy", np.nanmin(df_values))
return
def __call__(self, df_features):
if "date" in df_features:
            df_features = df_features.droplevel("date")  # DataFrame.droplevel has no inplace argument
df_values = df_features.values
index = 0
names = {}
for name, dim in self.norm_groups.items():
names[name] = slice(index, index + dim)
index += dim
for name, name_val in names.items():
feature_mean = np.load(self.feature_save_dir + name + "_mean.npy")
feature_std = np.load(self.feature_save_dir + name + "_std.npy")
if name.endswith("volume"):
df_values[:, name_val] = np.log1p(df_values[:, name_val])
df_values[:, name_val] -= feature_mean
df_values[:, name_val] /= feature_std
df_features = pd.DataFrame(data=df_values, index=df_features.index, columns=df_features.columns)
return df_features.fillna(0)
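# A minimal wiring sketch for the processors above (illustrative: the group
# sizes, date range, and save directory are assumptions; real features come
# from a qlib dataset handler). Note that fit() builds file names by plain
# string concatenation, so feature_save_dir should end with a separator.
norm = HighFreqNorm(
    fit_start_time=pd.Timestamp("2020-01-01"),
    fit_end_time=pd.Timestamp("2020-06-30"),
    feature_save_dir="./highfreq_stats/",
    norm_groups={"price": 10, "volume": 10},  # group name -> number of feature columns
)
# norm.fit(df_features)       # caches per-group mean/std/vmax/vmin as .npy files
# df_norm = norm(df_features)  # __call__ applies the cached statistics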
| [
[
[
7,
9
],
[
953,
955
],
[
999,
1001
],
[
1067,
1069
]
],
[
[
18,
29
],
[
439,
441
],
[
500,
502
],
[
1582,
1584
],
[
1634,
1636
],
[
1668,
1670
],
[
1824,
1826
],
[
1834,
1836
],
[
1870,
1872
],
[
2004,
2006
],
[
2056,
2058
],
[
2090,
2092
],
[
2142,
2144
],
[
2583,
2585
],
[
2661,
2663
],
[
2794,
2796
]
],
[
[
37,
49
],
[
601,
603
],
[
637,
639
],
[
2950,
2952
]
],
[
[
90,
99
],
[
200,
209
],
[
533,
542
]
],
[
[
136,
153
],
[
1121,
1138
]
],
[
[
173,
177
],
[
703,
707
]
],
[
[
186,
199
]
],
[
[
520,
532
]
]
] |
import torch
import torch.nn as nn
use_cuda = torch.cuda.is_available()
class CNNClassifier(nn.Module):
def __init__(self, channel, SHHS=False):
super(CNNClassifier, self).__init__()
conv1 = nn.Conv2d(1, 10, (1, 200))
pool1 = nn.MaxPool2d((1, 2))
if channel == 1:
conv2 = nn.Conv2d(10, 20, (1, 32))
conv3 = nn.Conv2d(20, 30, (1, 128))
conv4 = nn.Conv2d(30, 40, (1, 512))
freq = 1
else:
conv2 = nn.Conv2d(10, 20, (2, 32))
conv3 = nn.Conv2d(20, 30, (2, 128))
conv4 = nn.Conv2d(30, 40, (2, 512))
            freq = channel - 3
pool2 = nn.MaxPool2d((1, 2))
self.conv_module = nn.Sequential(conv1, nn.ReLU(), pool1, conv2, nn.ReLU(), conv3, nn.ReLU(), conv4, nn.ReLU(), pool2)
if SHHS:
fc1 = nn.Linear(freq * 40 * 553, 100)
else:
fc1 = nn.Linear(freq*40*365, 100)
fc2 = nn.Linear(100, 5)
self.fc_module = nn.Sequential(fc1, nn.ReLU(), fc2)
if use_cuda:
self.conv_module = self.conv_module.cuda()
self.fc_module = self.fc_module.cuda()
def forward(self, x, isfc):
out = self.conv_module(x)
dim = 1
for d in out.size()[1:]:
dim *= d
if isfc:
out = out.view(-1, dim)
out = self.fc_module(out)
else:
out = out.permute(0, 3, 2, 1).reshape([-1, 200, 73])
return out | [
[
[
7,
12
],
[
47,
52
]
],
[
[
20,
34
],
[
93,
95
],
[
212,
214
],
[
255,
257
],
[
321,
323
],
[
368,
370
],
[
416,
418
],
[
499,
501
],
[
546,
548
],
[
594,
596
],
[
665,
667
],
[
713,
715
],
[
734,
736
],
[
759,
761
],
[
777,
779
],
[
795,
797
],
[
849,
851
],
[
913,
915
],
[
955,
957
],
[
999,
1001
],
[
1018,
1020
]
],
[
[
36,
44
],
[
1046,
1054
]
],
[
[
79,
92
],
[
164,
177
]
]
] |
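# A shape-check sketch for the CNNClassifier defined above (illustrative; the
# 3000-sample single-channel input is chosen so the flattened conv output
# matches fc1's expected size of 1 * 40 * 365 when channel == 1).
model = CNNClassifier(channel=1)
x = torch.randn(4, 1, 1, 3000)  # (batch, conv channels, electrodes, samples)
if use_cuda:
    x = x.cuda()
logits = model(x, isfc=True)
print(logits.shape)  # torch.Size([4, 5]) -- five output classes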
from math import sqrt
# function with an annotated parameter (annotations are not enforced, so my_function(3) below still runs)
def my_function(a: str):
print(a)
my_function(3)
# function with parameter and return annotations
def my_function2(a: str) -> str:
return a
print(my_function2(3))
# import sqrt from math and use it
print(sqrt(9.4323))
# import alias from math
# from math import sqrt as square_root
# function with list parameter
def my_function3(a: list):
for i in a:
print(i)
my_function3([1, 2, 3, 4, 5])
# function with dictionary parameter
def my_function4(a: dict):
for key, value in a.items():
print(key, value)
my_function4({'a': 1, 'b': 2, 'c': 3})
# function with tuple parameter
def my_function5(a: tuple):
for i in a:
print(i)
my_function5(('a', 'b', 'c', 'd'))
# function with set parameter
def my_function6(a: set):
for i in a:
print(i)
my_function6({'a', 'b', 'c', 'd'})
# function with function parameter
def my_function7(a: callable):
a()
# async function with a callable parameter (running it requires an event loop, e.g. asyncio.run)
async def my_function8(a: callable):
a()
# my_function8(lambda: print('hello'))
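# Annotations are hints, not runtime checks -- which is why my_function(3)
# above succeeds despite the `a: str` annotation. A standard-library sketch
# for inspecting them:
import inspect

print(my_function.__annotations__)      # {'a': <class 'str'>}
print(inspect.signature(my_function2))  # (a: str) -> str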
| [
[
[
17,
21
],
[
246,
250
]
],
[
[
57,
68
],
[
89,
100
]
],
[
[
141,
153
],
[
187,
199
]
],
[
[
361,
373
],
[
409,
421
]
],
[
[
481,
493
],
[
555,
567
]
],
[
[
631,
643
],
[
680,
692
]
],
[
[
750,
762
],
[
797,
809
]
],
[
[
872,
884
]
],
[
[
934,
975
]
]
] |
import cv_datetime_utils
import cv2 as cv
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize
import json
import os
def compose_transformations(
rotation_vector_1,
translation_vector_1,
rotation_vector_2,
translation_vector_2):
rotation_vector_1 = np.asarray(rotation_vector_1).reshape(3)
translation_vector_1 = np.asarray(translation_vector_1).reshape(3)
rotation_vector_2 = np.asarray(rotation_vector_2).reshape(3)
translation_vector_2 = np.asarray(translation_vector_2).reshape(3)
rotation_vector_composed, translation_vector_composed = cv.composeRT(
rotation_vector_1,
translation_vector_1,
rotation_vector_2,
translation_vector_2)[:2]
rotation_vector_composed = np.squeeze(rotation_vector_composed)
translation_vector_composed = np.squeeze(translation_vector_composed)
return rotation_vector_composed, translation_vector_composed
def invert_transformation(
rotation_vector,
translation_vector):
rotation_vector = np.asarray(rotation_vector).reshape(3)
translation_vector = np.asarray(translation_vector).reshape(3)
new_rotation_vector, new_translation_vector = compose_transformations(
np.array([0.0, 0.0, 0.0]),
-translation_vector,
-rotation_vector,
np.array([0.0, 0.0, 0.0]))
new_rotation_vector = np.squeeze(new_rotation_vector)
new_translation_vector = np.squeeze(new_translation_vector)
return new_rotation_vector, new_translation_vector
def quaternion_vector_to_rotation_vector(quaternion_vector):
quaternion_vector = np.asarray(quaternion_vector).reshape(4)
spatial_vector = quaternion_vector[1:]
qw = quaternion_vector[0]
spatial_vector_length = np.linalg.norm(spatial_vector)
unit_vector = spatial_vector/spatial_vector_length
theta = 2*np.arctan2(spatial_vector_length, qw)
rotation_vector = theta*unit_vector
return rotation_vector
def quaternion_vector_to_rotation_matrix(quaternion_vector):
quaternion_tuple = tuple(np.asarray(quaternion_vector).reshape(4))
qw, qx, qy, qz = quaternion_tuple
R = np.array([
[qw**2 + qx**2 - qy**2 - qz**2, 2*(qx*qy - qw*qz), 2*(qw*qy + qx*qz)],
[2*(qx*qy + qw*qz), qw**2 - qx**2 + qy**2 - qz**2, 2*(qy*qz - qw*qx)],
[2*(qx*qz - qw*qy), 2*(qw*qx + qy*qz), qw**2 - qx**2 - qy**2 + qz**2]
])
return R
def rotation_vector_to_rotation_matrix(rotation_vector):
rotation_vector = np.asarray(rotation_vector).reshape(3)
rotation_matrix = cv.Rodrigues(rotation_vector)[0]
return rotation_matrix
def transform_object_points(
object_points,
rotation_vector=np.array([0.0, 0.0, 0.0]),
translation_vector=np.array([0.0, 0.0, 0.0])):
object_points = np.asarray(object_points)
rotation_vector = np.asarray(rotation_vector)
translation_vector = np.asarray(translation_vector)
if object_points.size == 0:
return object_points
object_points = object_points.reshape((-1, 3))
rotation_vector = rotation_vector.reshape(3)
translation_vector = translation_vector.reshape(3)
transformed_points = np.add(
np.matmul(
cv.Rodrigues(rotation_vector)[0],
object_points.T).T,
translation_vector.reshape((1, 3)))
transformed_points = np.squeeze(transformed_points)
return transformed_points
def generate_camera_pose(
camera_position=np.array([0.0, 0.0, 0.0]),
yaw=0.0,
pitch=0.0,
roll=0.0):
# yaw: 0.0 points north (along the positive y-axis), positive angles rotate counter-clockwise
# pitch: 0.0 is level with the ground, positive angles rotate upward
# roll: 0.0 is level with the ground, positive angles rotate clockwise
# All angles in radians
camera_position = np.asarray(camera_position).reshape(3)
# First: Move the camera to the specified position
rotation_vector_1 = np.array([0.0, 0.0, 0.0])
translation_vector_1 = -camera_position
# Second: Rotate the camera so when we lower to the specified inclination, it will point in the specified compass direction
rotation_vector_2 = np.array([0.0, 0.0, -(yaw - np.pi / 2)])
translation_vector_2 = np.array([0.0, 0.0, 0.0])
# Third: Lower to the specified inclination
rotation_vector_2_3 = np.array([(np.pi / 2 - pitch), 0.0, 0.0])
translation_vector_2_3 = np.array([0.0, 0.0, 0.0])
# Fourth: Roll the camera by the specified angle
rotation_vector_2_3_4 = np.array([0.0, 0.0, -roll])
translation_vector_2_3_4 = np.array([0.0, 0.0, 0.0])
# Combine these four moves
rotation_vector_1_2, translation_vector_1_2 = compose_transformations(
rotation_vector_1,
translation_vector_1,
rotation_vector_2,
translation_vector_2)
rotation_vector_1_2_3, translation_vector_1_2_3 = compose_transformations(
rotation_vector_1_2,
translation_vector_1_2,
rotation_vector_2_3,
translation_vector_2_3)
rotation_vector, translation_vector = compose_transformations(
rotation_vector_1_2_3,
translation_vector_1_2_3,
rotation_vector_2_3_4,
translation_vector_2_3_4)
rotation_vector = np.squeeze(rotation_vector)
translation_vector = np.squeeze(translation_vector)
return rotation_vector, translation_vector
def extract_camera_position(
rotation_vector,
translation_vector):
rotation_vector = np.asarray(rotation_vector).reshape(3)
translation_vector = np.asarray(translation_vector).reshape(3)
new_rotation_vector, new_translation_vector = compose_transformations(
rotation_vector,
translation_vector,
-rotation_vector,
np.array([0.0, 0.0, 0.0]))
camera_position = -np.squeeze(new_translation_vector)
return camera_position
def extract_camera_position_rotation_matrix(rotation_matrix, translation_vector):
rotation_matrix = np.asarray(rotation_matrix).reshape((3,3))
translation_vector = np.asarray(translation_vector).reshape(3)
position = np.matmul(rotation_matrix.T, -translation_vector.T)
return position
def extract_camera_direction(
rotation_vector,
translation_vector):
rotation_vector = np.asarray(rotation_vector).reshape(3)
translation_vector = np.asarray(translation_vector).reshape(3)
camera_direction = np.matmul(
cv.Rodrigues(-rotation_vector)[0],
np.array([[0.0], [0.0], [1.0]]))
camera_direction = np.squeeze(camera_direction)
return camera_direction
def reconstruct_z_rotation(x, y):
if x >= 0.0 and y >= 0.0:
return np.arctan(y / x)
if x >= 0.0 and y < 0.0:
return np.arctan(y / x) + 2 * np.pi
return np.arctan(y / x) + np.pi
# Currently unused; needs to be fixed up for cases in which x and/or y are close
# to zero
def extract_yaw_from_camera_direction(
camera_direction):
camera_direction = np.asarray(camera_direction).reshape(3)
yaw = reconstruct_z_rotation(
camera_direction[0],
camera_direction[1])
return yaw
def generate_camera_matrix(
focal_length,
principal_point):
focal_length = np.asarray(focal_length).reshape(2)
principal_point = np.asarray(principal_point).reshape(2)
camera_matrix = np.array([
[focal_length[0], 0, principal_point[0]],
[0, focal_length[1], principal_point[1]],
[0, 0, 1.0]])
return camera_matrix
def generate_projection_matrix(
camera_matrix,
rotation_vector,
translation_vector):
camera_matrix = np.asarray(camera_matrix).reshape((3, 3))
rotation_vector = np.asarray(rotation_vector).reshape(3)
translation_vector = np.asarray(translation_vector).reshape(3)
projection_matrix = np.matmul(
camera_matrix,
np.concatenate((
cv.Rodrigues(rotation_vector)[0],
translation_vector.reshape((3, 1))),
axis=1))
return(projection_matrix)
def ground_grid_camera_view(
image_width,
image_height,
rotation_vector,
translation_vector,
camera_matrix,
distortion_coefficients=np.array([0.0, 0.0, 0.0, 0.0]),
fill_image=False,
step=0.1
):
grid_corners = ground_rectangle_camera_view(
image_width=image_width,
image_height=image_height,
rotation_vector=rotation_vector,
translation_vector=translation_vector,
camera_matrix=camera_matrix,
distortion_coefficients=distortion_coefficients,
fill_image=fill_image
)
grid_points = generate_ground_grid(
grid_corners=grid_corners,
step=step
)
return grid_points
def ground_rectangle_camera_view(
image_width,
image_height,
rotation_vector,
translation_vector,
camera_matrix,
distortion_coefficients=np.array([0.0, 0.0, 0.0, 0.0]),
fill_image=False
):
image_points = np.array([
[0.0, 0.0],
[image_width, 0.0],
[image_width, image_height],
[0.0, image_height]
])
ground_points=np.empty((4, 3))
for i in range(4):
ground_points[i] = ground_point(
image_point=image_points[i],
rotation_vector=rotation_vector,
translation_vector=translation_vector,
camera_matrix=camera_matrix,
distortion_coefficients=distortion_coefficients
)
x_values_sorted = np.sort(ground_points[:, 0])
y_values_sorted = np.sort(ground_points[:, 1])
if fill_image:
x_min = x_values_sorted[0]
x_max = x_values_sorted[3]
y_min = y_values_sorted[0]
y_max = y_values_sorted[3]
else:
x_min = x_values_sorted[1]
x_max = x_values_sorted[2]
y_min = y_values_sorted[1]
y_max = y_values_sorted[2]
return np.array([
[x_min, y_min],
[x_max, y_max]
])
def ground_point(
image_point,
rotation_vector,
translation_vector,
camera_matrix,
distortion_coefficients=np.array([0.0, 0.0, 0.0, 0.0])
):
image_point = np.asarray(image_point)
rotation_vector = np.asarray(rotation_vector)
translation_vector = np.asarray(translation_vector)
camera_matrix = np.asarray(camera_matrix)
distortion_coefficients = np.asarray(distortion_coefficients)
image_point = image_point.reshape((2))
rotation_vector = rotation_vector.reshape(3)
translation_vector = translation_vector.reshape(3)
camera_matrix = camera_matrix.reshape((3, 3))
image_point_undistorted = cv.undistortPoints(
image_point,
camera_matrix,
distortion_coefficients,
P=camera_matrix
)
image_point_undistorted = np.squeeze(image_point_undistorted)
camera_position = np.matmul(
cv.Rodrigues(-rotation_vector)[0],
-translation_vector.T
).T
camera_point_homogeneous = np.matmul(
np.linalg.inv(camera_matrix),
np.array([image_point_undistorted[0], image_point_undistorted[1], 1.0]).T
).T
camera_direction = np.matmul(
cv.Rodrigues(-rotation_vector)[0],
camera_point_homogeneous.T
).T
theta = -camera_position[2]/camera_direction[2]
ground_point = camera_position + theta*camera_direction
return ground_point
def generate_ground_grid(
grid_corners,
step=0.1
):
x_grid, y_grid = np.meshgrid(
np.arange(grid_corners[0, 0], grid_corners[1, 0], step=step),
np.arange(grid_corners[0, 1], grid_corners[1, 1], step=step)
)
grid = np.stack((x_grid, y_grid, np.full_like(x_grid, 0.0)), axis=-1)
points = grid.reshape((-1, 3))
return points
def project_points(
object_points,
rotation_vector,
translation_vector,
camera_matrix,
distortion_coefficients,
remove_behind_camera=False,
remove_outside_frame=False,
image_corners=None
):
object_points = np.asarray(object_points).reshape((-1, 3))
rotation_vector = np.asarray(rotation_vector).reshape(3)
translation_vector = np.asarray(translation_vector).reshape(3)
camera_matrix = np.asarray(camera_matrix).reshape((3, 3))
distortion_coefficients = np.squeeze(np.asarray(distortion_coefficients))
if object_points.size == 0:
return np.zeros((0, 2))
image_points = cv.projectPoints(
object_points,
rotation_vector,
translation_vector,
camera_matrix,
distortion_coefficients
)[0]
if remove_behind_camera:
behind_camera_boolean = behind_camera(
object_points,
rotation_vector,
translation_vector
)
image_points[behind_camera_boolean] = np.array([np.nan, np.nan])
if remove_outside_frame:
outside_frame_boolean = outside_frame(
object_points,
rotation_vector,
translation_vector,
camera_matrix,
distortion_coefficients,
image_corners
)
image_points[outside_frame_boolean] = np.array([np.nan, np.nan])
image_points = np.squeeze(image_points)
return image_points
def behind_camera(
object_points,
rotation_vector,
translation_vector):
object_points = np.asarray(object_points)
rotation_vector = np.asarray(rotation_vector)
translation_vector = np.asarray(translation_vector)
if object_points.size == 0:
return np.zeros((0, 2))
object_points = object_points.reshape((-1, 3))
rotation_vector = rotation_vector.reshape(3)
translation_vector = translation_vector.reshape(3)
object_points_transformed = transform_object_points(
object_points,
rotation_vector,
translation_vector
)
behind_camera_boolean = (object_points_transformed <= 0)[..., 2]
return behind_camera_boolean
def outside_frame(
object_points,
rotation_vector,
translation_vector,
camera_matrix,
distortion_coefficients,
image_corners
):
object_points = np.asarray(object_points).reshape((-1, 3))
rotation_vector = np.asarray(rotation_vector)
translation_vector = np.asarray(translation_vector).reshape(3)
camera_matrix = np.asarray(camera_matrix).reshape((3,3))
distortion_coefficients = np.squeeze(np.asarray(distortion_coefficients))
image_corners = np.asarray(image_corners).reshape((2,2))
if object_points.size == 0:
return np.zeros((0, 2))
image_points = cv.projectPoints(
object_points,
rotation_vector,
translation_vector,
camera_matrix,
np.array([0.0, 0.0, 0.0, 0.0])
)[0]
image_points = image_points.reshape((-1, 2))
outside_frame_boolean = (
(image_points[:, 0] < image_corners[0, 0]) |
(image_points[:, 0] > image_corners[1, 0]) |
(image_points[:, 1] < image_corners[0, 1]) |
(image_points[:, 1] > image_corners[1, 1])
)
return outside_frame_boolean
def undistort_points(
image_points,
camera_matrix,
distortion_coefficients):
image_points = np.asarray(image_points)
camera_matrix = np.asarray(camera_matrix)
distortion_coefficients = np.asarray(distortion_coefficients)
if image_points.size == 0:
return image_points
image_points = image_points.reshape((-1, 1, 2))
camera_matrix = camera_matrix.reshape((3, 3))
undistorted_points = cv.undistortPoints(
image_points,
camera_matrix,
distortion_coefficients,
P=camera_matrix)
undistorted_points = np.squeeze(undistorted_points)
return undistorted_points
def estimate_camera_pose_from_image_points(
image_points_1,
image_points_2,
camera_matrix,
rotation_vector_1=np.array([0.0, 0.0, 0.0]),
translation_vector_1=np.array([0.0, 0.0, 0.0]),
distance_between_cameras=1.0):
image_points_1 = np.asarray(image_points_1)
image_points_2 = np.asarray(image_points_2)
camera_matrix = np.asarray(camera_matrix)
rotation_vector_1 = np.asarray(rotation_vector_1)
translation_vector_1 = np.asarray(translation_vector_1)
if image_points_1.size == 0 or image_points_2.size == 0:
raise ValueError('One or both sets of image points appear to be empty')
image_points_1 = image_points_1.reshape((-1, 2))
image_points_2 = image_points_2.reshape((-1, 2))
if image_points_1.shape != image_points_2.shape:
raise ValueError('Sets of image points do not appear to be the same shape')
camera_matrix = camera_matrix.reshape((3, 3))
rotation_vector_1 = rotation_vector_1.reshape(3)
translation_vector_1 = translation_vector_1.reshape(3)
essential_matrix, mask = cv.findEssentialMat(
image_points_1,
image_points_2,
camera_matrix)
relative_rotation_matrix, relative_translation_vector = cv.recoverPose(
essential_matrix,
image_points_1,
image_points_2,
camera_matrix,
mask=mask)[1:3]
relative_rotation_vector = cv.Rodrigues(relative_rotation_matrix)[0]
relative_translation_vector = relative_translation_vector * distance_between_cameras
rotation_vector_2, translation_vector_2 = compose_transformations(
rotation_vector_1,
translation_vector_1,
relative_rotation_vector,
relative_translation_vector)
rotation_vector_2 = np.squeeze(rotation_vector_2)
translation_vector_2 = np.squeeze(translation_vector_2)
return rotation_vector_2, translation_vector_2
def reconstruct_object_points_from_camera_poses(
image_points_1,
image_points_2,
camera_matrix,
rotation_vector_1,
translation_vector_1,
rotation_vector_2,
translation_vector_2):
image_points_1 = np.asarray(image_points_1)
image_points_2 = np.asarray(image_points_2)
camera_matrix = np.asarray(camera_matrix)
rotation_vector_1 = np.asarray(rotation_vector_1)
translation_vector_1 = np.asarray(translation_vector_1)
rotation_vector_2 = np.asarray(rotation_vector_2)
translation_vector_2 = np.asarray(translation_vector_2)
if image_points_1.size == 0 or image_points_2.size == 0:
return np.zeros((0, 3))
image_points_1 = image_points_1.reshape((-1, 2))
image_points_2 = image_points_2.reshape((-1, 2))
if image_points_1.shape != image_points_2.shape:
raise ValueError('Sets of image points do not appear to be the same shape')
camera_matrix = camera_matrix.reshape((3, 3))
rotation_vector_1 = rotation_vector_1.reshape(3)
translation_vector_1 = translation_vector_1.reshape(3)
rotation_vector_2 = rotation_vector_2.reshape(3)
translation_vector_2 = translation_vector_2.reshape(3)
projection_matrix_1 = generate_projection_matrix(
camera_matrix,
rotation_vector_1,
translation_vector_1)
projection_matrix_2 = generate_projection_matrix(
camera_matrix,
rotation_vector_2,
translation_vector_2)
object_points_homogeneous = cv.triangulatePoints(
projection_matrix_1,
projection_matrix_2,
image_points_1.T,
image_points_2.T)
object_points = cv.convertPointsFromHomogeneous(
object_points_homogeneous.T)
object_points = np.squeeze(object_points)
return object_points
def reconstruct_object_points_from_relative_camera_pose(
image_points_1,
image_points_2,
camera_matrix,
relative_rotation_vector,
relative_translation_vector,
rotation_vector_1=np.array([[0.0], [0.0], [0.0]]),
translation_vector_1=np.array([[0.0], [0.0], [0.0]]),
distance_between_cameras=1.0):
image_points_1 = np.asarray(image_points_1)
image_points_2 = np.asarray(image_points_2)
camera_matrix = np.asarray(camera_matrix)
relative_rotation_vector = np.asarray(relative_rotation_vector)
relative_translation_vector = np.asarray(relative_translation_vector)
rotation_vector_1 = np.asarray(rotation_vector_1)
translation_vector_1 = np.asarray(translation_vector_1)
if image_points_1.size == 0 or image_points_2.size == 0:
return np.zeros((0, 3))
image_points_1 = image_points_1.reshape((-1, 2))
image_points_2 = image_points_2.reshape((-1, 2))
if image_points_1.shape != image_points_2.shape:
raise ValueError('Sets of image points do not appear to be the same shape')
camera_matrix = camera_matrix.reshape((3, 3))
relative_rotation_vector = relative_rotation_vector.reshape(3)
relative_translation_vector = relative_translation_vector.reshape(3)
rotation_vector_1 = rotation_vector_1.reshape(3)
translation_vector_1 = translation_vector_1.reshape(3)
rotation_vector_2, translation_vector_2 = cv.composeRT(
rotation_vector_1,
translation_vector_1,
relative_rotation_vector,
relative_translation_vector * distance_between_cameras)[:2]
object_points = reconstruct_object_points_from_camera_poses(
image_points_1,
image_points_2,
camera_matrix,
rotation_vector_1,
translation_vector_1,
rotation_vector_2,
translation_vector_2)
return object_points
def reconstruct_object_points_from_image_points(
image_points_1,
image_points_2,
camera_matrix,
rotation_vector_1=np.array([[0.0], [0.0], [0.0]]),
translation_vector_1=np.array([[0.0], [0.0], [0.0]]),
distance_between_cameras=1.0):
image_points_1 = np.asarray(image_points_1)
image_points_2 = np.asarray(image_points_2)
camera_matrix = np.asarray(camera_matrix)
rotation_vector_1 = np.asarray(rotation_vector_1)
translation_vector_1 = np.asarray(translation_vector_1)
if image_points_1.size == 0 or image_points_2.size == 0:
return np.zeros((0, 3))
image_points_1 = image_points_1.reshape((-1, 2))
image_points_2 = image_points_2.reshape((-1, 2))
if image_points_1.shape != image_points_2.shape:
raise ValueError('Sets of image points do not appear to be the same shape')
camera_matrix = camera_matrix.reshape((3, 3))
rotation_vector_1 = rotation_vector_1.reshape(3)
translation_vector_1 = translation_vector_1.reshape(3)
rotation_vector_2, translation_vector_2 = estimate_camera_pose_from_image_points(
image_points_1,
image_points_2,
camera_matrix,
rotation_vector_1,
translation_vector_1,
distance_between_cameras)
object_points = reconstruct_object_points_from_camera_poses(
image_points_1,
image_points_2,
camera_matrix,
rotation_vector_1,
translation_vector_1,
rotation_vector_2,
translation_vector_2)
return object_points
def estimate_camera_pose_from_plane_object_points(
input_object_points,
height,
origin_index,
x_axis_index,
y_reference_point,
y_reference_point_sign,
distance_calibration_indices,
calibration_distance):
input_object_points = np.asarray(input_object_points)
if input_object_points.size == 0:
        raise ValueError('Object point array appears to be empty')
input_object_points = input_object_points.reshape((-1, 3))
scale_factor = np.divide(
calibration_distance,
np.linalg.norm(
np.subtract(
input_object_points[distance_calibration_indices[0]],
input_object_points[distance_calibration_indices[1]])))
object_points_1 = np.multiply(
input_object_points,
scale_factor)
def objective_function(parameters):
rotation_x = parameters[0]
rotation_y = parameters[1]
translation_z = parameters[2]
object_points_transformed = transform_object_points(
object_points_1,
np.array([rotation_x, rotation_y, 0.0]),
np.array([0.0, 0.0, translation_z]))
return np.sum(np.square(object_points_transformed[:, 2] - height))
optimization_solution = scipy.optimize.minimize(
objective_function,
np.array([0.0, 0.0, 0.0]))
rotation_x_a = optimization_solution['x'][0]
rotation_y_a = optimization_solution['x'][1]
translation_z_a = optimization_solution['x'][2]
rotation_x_rotation_y_a_norm = np.linalg.norm([rotation_x_a, rotation_y_a])
rotation_x_b = rotation_x_a * ((rotation_x_rotation_y_a_norm + np.pi) / rotation_x_rotation_y_a_norm)
rotation_y_b = rotation_y_a * ((rotation_x_rotation_y_a_norm + np.pi) / rotation_x_rotation_y_a_norm)
translation_z_b = - translation_z_a
rotation_vector_2_a = np.array([rotation_x_a, rotation_y_a, 0.0])
translation_vector_2_a = np.array([0.0, 0.0, translation_z_a])
object_points_2_a = transform_object_points(
object_points_1,
rotation_vector_2_a,
translation_vector_2_a)
rotation_vector_2_b = np.array([rotation_x_b, rotation_y_b, 0.0])
translation_vector_2_b = np.array([0.0, 0.0, translation_z_b])
object_points_2_b = transform_object_points(
object_points_1,
rotation_vector_2_b,
translation_vector_2_b)
sign_a = np.sign(
np.cross(
np.subtract(
object_points_2_a[x_axis_index],
object_points_2_a[origin_index]),
np.subtract(
object_points_2_a[y_reference_point],
object_points_2_a[origin_index]))[2])
sign_b = np.sign(
np.cross(
np.subtract(
object_points_2_b[x_axis_index],
object_points_2_b[origin_index]),
np.subtract(
object_points_2_b[y_reference_point],
object_points_2_b[origin_index]))[2])
if sign_a == y_reference_point_sign:
rotation_vector_2 = rotation_vector_2_a
translation_vector_2 = translation_vector_2_a
object_points_2 = object_points_2_a
else:
rotation_vector_2 = rotation_vector_2_b
translation_vector_2 = translation_vector_2_b
object_points_2 = object_points_2_b
xy_shift = - object_points_2[origin_index, :2]
rotation_vector_3 = np.array([0.0, 0.0, 0.0])
translation_vector_3 = np.array([xy_shift[0], xy_shift[1], 0.0])
object_points_3 = transform_object_points(
object_points_2,
rotation_vector_3,
translation_vector_3)
final_z_rotation = - reconstruct_z_rotation(
object_points_3[x_axis_index, 0],
object_points_3[x_axis_index, 1])
rotation_vector_4 = np.array([0.0, 0.0, final_z_rotation])
translation_vector_4 = np.array([0.0, 0.0, 0.0])
object_points_4 = transform_object_points(
object_points_3,
rotation_vector_4,
translation_vector_4)
rotation_vector_2_3, translation_vector_2_3 = compose_transformations(
rotation_vector_2,
translation_vector_2,
rotation_vector_3,
translation_vector_3)
rotation_vector_2_3_4, translation_vector_2_3_4 = compose_transformations(
rotation_vector_2_3,
translation_vector_2_3,
rotation_vector_4,
translation_vector_4)
camera_rotation_vector, camera_translation_vector = invert_transformation(
rotation_vector_2_3_4,
translation_vector_2_3_4)
return camera_rotation_vector, camera_translation_vector, scale_factor, object_points_4
def estimate_camera_poses_from_plane_image_points(
image_points_1,
image_points_2,
camera_matrix,
height,
origin_index,
x_axis_index,
y_reference_point,
y_reference_point_sign,
distance_calibration_indices,
calibration_distance):
image_points_1 = np.asarray(image_points_1)
image_points_2 = np.asarray(image_points_2)
camera_matrix = np.asarray(camera_matrix)
if image_points_1.size == 0 or image_points_2.size == 0:
raise ValueError('One or both sets of image points appear to be empty')
image_points_1 = image_points_1.reshape((-1, 2))
image_points_2 = image_points_2.reshape((-1, 2))
if image_points_1.shape != image_points_2.shape:
raise ValueError('Sets of image points do not appear to be the same shape')
camera_matrix = camera_matrix.reshape((3, 3))
relative_rotation_vector, relative_translation_vector = estimate_camera_pose_from_image_points(
image_points_1,
image_points_2,
camera_matrix)
input_object_points = reconstruct_object_points_from_image_points(
image_points_1,
image_points_2,
camera_matrix)
rotation_vector_1, translation_vector_1, scale_factor = estimate_camera_pose_from_plane_object_points(
input_object_points,
height,
origin_index,
x_axis_index,
y_reference_point,
y_reference_point_sign,
distance_calibration_indices,
calibration_distance)[:3]
rotation_vector_2, translation_vector_2 = compose_transformations(
rotation_vector_1,
translation_vector_1,
relative_rotation_vector,
relative_translation_vector * scale_factor)
return rotation_vector_1, translation_vector_1, rotation_vector_2, translation_vector_2
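# A round-trip sanity sketch for the transformation helpers above
# (illustrative values): composing a pose with its inverse should recover the
# identity transform (zero rotation and translation, up to float error).
_rvec = np.array([0.1, -0.2, 0.3])
_tvec = np.array([1.0, 2.0, 3.0])
_inv_rvec, _inv_tvec = invert_transformation(_rvec, _tvec)
_rt_rvec, _rt_tvec = compose_transformations(_rvec, _tvec, _inv_rvec, _inv_tvec)
print(np.allclose(_rt_rvec, 0.0, atol=1e-6), np.allclose(_rt_tvec, 0.0, atol=1e-6))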
| [
[
[
7,
24
]
],
[
[
32,
41
],
[
614,
616
],
[
2561,
2563
],
[
3212,
3214
],
[
6424,
6426
],
[
7884,
7886
],
[
10553,
10555
],
[
10787,
10789
],
[
11076,
11078
],
[
12286,
12288
],
[
14419,
14421
],
[
15357,
15359
],
[
16662,
16664
],
[
16814,
16816
],
[
16982,
16984
],
[
18994,
18996
],
[
19146,
19148
],
[
20730,
20732
]
],
[
[
49,
60
],
[
2698,
2700
],
[
2752,
2754
],
[
3460,
3462
],
[
8175,
8177
],
[
8864,
8866
],
[
10032,
10034
],
[
15709,
15711
],
[
15765,
15767
],
[
19514,
19516
],
[
19576,
19578
],
[
21326,
21328
],
[
21388,
21390
],
[
306,
308
],
[
374,
376
],
[
442,
444
],
[
510,
512
],
[
777,
779
],
[
848,
850
],
[
1058,
1060
],
[
1122,
1124
],
[
1247,
1249
],
[
1337,
1339
],
[
1390,
1392
],
[
1451,
1453
],
[
1627,
1629
],
[
1769,
1771
],
[
1869,
1871
],
[
2065,
2067
],
[
2153,
2155
],
[
2500,
2502
],
[
2800,
2802
],
[
2848,
2850
],
[
2901,
2903
],
[
3173,
3175
],
[
3189,
3191
],
[
3347,
3349
],
[
3838,
3840
],
[
3956,
3958
],
[
4178,
4180
],
[
4206,
4208
],
[
4246,
4248
],
[
4346,
4348
],
[
4357,
4359
],
[
4417,
4419
],
[
4524,
4526
],
[
4583,
4585
],
[
5249,
5251
],
[
5302,
5304
],
[
5487,
5489
],
[
5551,
5553
],
[
5755,
5757
],
[
5805,
5807
],
[
5972,
5974
],
[
6040,
6042
],
[
6097,
6099
],
[
6276,
6278
],
[
6340,
6342
],
[
6405,
6407
],
[
6467,
6469
],
[
6523,
6525
],
[
6661,
6663
],
[
6722,
6724
],
[
6745,
6747
],
[
6762,
6764
],
[
6781,
6783
],
[
6969,
6971
],
[
7213,
7215
],
[
7271,
7273
],
[
7330,
7332
],
[
7619,
7621
],
[
7683,
7685
],
[
7747,
7749
],
[
7813,
7815
],
[
7855,
7857
],
[
8939,
8941
],
[
9088,
9090
],
[
9439,
9441
],
[
9490,
9492
],
[
9839,
9841
],
[
10084,
10086
],
[
10130,
10132
],
[
10183,
10185
],
[
10234,
10236
],
[
10290,
10292
],
[
10710,
10712
],
[
10768,
10770
],
[
10895,
10897
],
[
10914,
10916
],
[
10952,
10954
],
[
11057,
11059
],
[
11372,
11374
],
[
11389,
11391
],
[
11455,
11457
],
[
11533,
11535
],
[
11559,
11561
],
[
11892,
11894
],
[
11957,
11959
],
[
12021,
12023
],
[
12083,
12085
],
[
12155,
12157
],
[
12166,
12168
],
[
12250,
12252
],
[
12663,
12665
],
[
12673,
12675
],
[
12681,
12683
],
[
13000,
13002
],
[
13010,
13012
],
[
13018,
13020
],
[
13046,
13048
],
[
13212,
13214
],
[
13260,
13262
],
[
13313,
13315
],
[
13391,
13393
],
[
13976,
13978
],
[
14041,
14043
],
[
14094,
14096
],
[
14156,
14158
],
[
14227,
14229
],
[
14238,
14240
],
[
14295,
14297
],
[
14383,
14385
],
[
14544,
14546
],
[
15034,
15036
],
[
15079,
15081
],
[
15135,
15137
],
[
15505,
15507
],
[
15852,
15854
],
[
15900,
15902
],
[
15947,
15949
],
[
15997,
15999
],
[
16054,
16056
],
[
17336,
17338
],
[
17393,
17395
],
[
17735,
17737
],
[
17783,
17785
],
[
17830,
17832
],
[
17880,
17882
],
[
17937,
17939
],
[
17994,
17996
],
[
18051,
18053
],
[
18160,
18162
],
[
19236,
19238
],
[
19669,
19671
],
[
19717,
19719
],
[
19764,
19766
],
[
19821,
19823
],
[
19892,
19894
],
[
19956,
19958
],
[
20013,
20015
],
[
20122,
20124
],
[
21481,
21483
],
[
21529,
21531
],
[
21576,
21578
],
[
21626,
21628
],
[
21683,
21685
],
[
21792,
21794
],
[
23033,
23035
],
[
23252,
23254
],
[
23301,
23303
],
[
23329,
23331
],
[
23507,
23509
],
[
24077,
24079
],
[
24291,
24293
],
[
24404,
24406
],
[
24510,
24512
],
[
24616,
24618
],
[
24689,
24691
],
[
24889,
24891
],
[
24962,
24964
],
[
25149,
25151
],
[
25166,
25168
],
[
25188,
25190
],
[
25312,
25314
],
[
25447,
25449
],
[
25464,
25466
],
[
25486,
25488
],
[
25610,
25612
],
[
26152,
26154
],
[
26205,
26207
],
[
26535,
26537
],
[
26601,
26603
],
[
27715,
27717
],
[
27763,
27765
],
[
27810,
27812
],
[
23822,
23824
],
[
23875,
23877
],
[
23927,
23929
],
[
23934,
23936
]
],
[
[
68,
92
]
],
[
[
100,
114
],
[
24016,
24021
]
],
[
[
122,
126
]
],
[
[
134,
136
]
],
[
[
142,
165
],
[
1214,
1237
],
[
4690,
4713
],
[
4883,
4906
],
[
5072,
5095
],
[
5643,
5666
],
[
17159,
17182
],
[
26807,
26830
],
[
27001,
27024
],
[
28956,
28979
]
],
[
[
959,
980
],
[
27201,
27222
]
],
[
[
1546,
1582
]
],
[
[
1979,
2015
]
],
[
[
2425,
2459
]
],
[
[
2626,
2649
],
[
13595,
13618
],
[
24751,
24774
],
[
25024,
25047
],
[
26269,
26292
],
[
26649,
26672
],
[
23756,
23779
]
],
[
[
3414,
3434
]
],
[
[
5386,
5409
]
],
[
[
5872,
5911
]
],
[
[
6174,
6198
]
],
[
[
6586,
6608
],
[
7019,
7041
],
[
26402,
26424
]
],
[
[
6884,
6917
]
],
[
[
7122,
7144
]
],
[
[
7494,
7520
],
[
18720,
18746
],
[
18854,
18880
]
],
[
[
8023,
8046
]
],
[
[
8707,
8735
],
[
8264,
8292
]
],
[
[
9909,
9921
],
[
9155,
9167
]
],
[
[
11295,
11315
],
[
8598,
8618
]
],
[
[
11654,
11668
]
],
[
[
13100,
13113
],
[
12505,
12518
]
],
[
[
13808,
13821
],
[
12751,
12764
]
],
[
[
14918,
14934
]
],
[
[
15572,
15610
],
[
22260,
22298
],
[
28330,
28368
]
],
[
[
17483,
17526
],
[
20923,
20966
],
[
22482,
22525
]
],
[
[
19293,
19344
]
],
[
[
21184,
21227
],
[
28467,
28510
]
],
[
[
22743,
22788
],
[
28643,
28688
]
],
[
[
27388,
27433
]
]
] |
"""
Client
Run by the evaluator, sends a TLS Client Hello with the ESNI extension, followed by two test packets.
"""
import argparse
import binascii as bi
import os
import socket
import time
socket.setdefaulttimeout(1)
from plugins.plugin_client import ClientPlugin
class ESNIClient(ClientPlugin):
"""
Defines the ESNI client.
"""
name = "esni"
def __init__(self, args):
"""
Initializes the esni client.
"""
ClientPlugin.__init__(self)
self.args = args
@staticmethod
def get_args(command):
"""
Defines required args for this plugin
"""
super_args = ClientPlugin.get_args(command)
parser = argparse.ArgumentParser(description='ESNI Client')
parser.add_argument('--server', action='store', help="server to connect to")
args, _ = parser.parse_known_args(command)
args = vars(args)
super_args.update(args)
return super_args
def run(self, args, logger, engine=None):
"""
        Send the TLS Client Hello with the ESNI extension, followed by two test packets.
"""
fitness = 0
port = int(args["port"])
server = args["server"]
# Client Hello with the ESNI extension
msg = b'16030103ae010003aa0303d992f9c22fbe7a7cdbc9619924bd9cc13c057f5f3da1829426cb0944292705152033c5be80af6de7633e07680125e27e3f7b80ff5e9b3cbe5278434c90b9e0e5fa0024130113031302c02bc02fcca9cca8c02cc030c00ac009c013c014009c009d002f0035000a0100033d00170000ff01000100000a000e000c001d00170018001901000101000b000201000010000e000c02683208687474702f312e310005000501000000000033006b0069001d002019570ada256d971048b34d3e9ff5607588bf10cfb6c064fc45a0fc401d9a7c470017004104ea047fd2e0fc3314de4bf03ee6205134f0d15c07f62b77625a95dc194ce8fb88cc16e53c8b400ba463915b87480b247851c095abdb0d3d5d5b14dd77dcd73750002b00050403040303000d0018001604030503060308040805080604010501060102030201002d00020101ffce016e1301001d00203652aaf122dc47dcf9fa8c37377476d050e54119adfb518f7aabd842ac97d23b00205a30e70593f57708370310ecf7054e488a62eb11e01fd059851c442d453d15c5012441910eec152c4df5ff28bf5cddb1a2e54e8595197e3dc36325145ad50a7842eb3860c8fc6ac5c1794017101365c6122abb3b81f31f5f4204eebb244252d22600734424d875948657b892d3aab3310491aff3b5126f1186bd9c321fb446cf2a41985dd206364ea28c3f8aafeafc62e039f157c3f2703a35448d2d16dcf2d5055ce58c024a5b4eb780fc5128af4ba4e90d6eef1b3cf30a5b2000448d65d6af4fffabeb91e1ed2093fdcc6ffd87ceb94429864ddb657e6316654631193fd25840e51645e1708d351140dd6eeefb80ddbaebb250b2975a1d5f291d99f89de4553d083f1b9820a3ee6976357cff433b7eb77febb3eb0db012154154d3e19b4409f8afa11aa1baeb0b7663d97f0caca2b11ed971fc574588e76a37aa4259593fe8e07fbbca27fa001c00024001002900eb00c600c07f87fafe9de4168227aeec4540f1aaeae43ff61a353f5480420ac3c33f90003fe6f501080bf04f22576a0cc1db8dc83d37b25859a81ce0277364a1794cde1c60f3b94175477beff56db7f9e2b83b31383b7d8b5da20834fb0a63d7ba2e42ad3dfa21666ed8621f34273ac5c273d7f492750e3df3bae36e398ddf83d4a7c36f639087f14eb1f7bfb2c7c0c736d69bcdbf21158c07b7088b95e5bcd08138d6b511f6492d7d93bb3729641519097b970cfeffa5882c67111dcf5d7966a1c58b4edb6e8c905a002120e47ccba37d89e4c1d979c6ef954d1cd946eff0d3119aa2b4d6411138aec74579'
try:
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.settimeout(5)
client.connect((server, port))
client.sendall(bi.unhexlify(msg))
time.sleep(2)
client.sendall(b"test packet")
time.sleep(2)
client.sendall(b"test packet 2")
server_data = client.recv(1024)
logger.debug("Data recieved: %s", server_data.decode('utf-8', 'ignore'))
fitness += 100
client.close()
except socket.timeout:
# Happens on connect, not sendall
logger.debug("Client: Timeout")
fitness -= 110
        except socket.error:
fitness -= 100
logger.exception("Socket error caught in client esni test.")
except Exception:
logger.exception("Exception caught in client esni test.")
fitness = -120
finally:
logger.debug("Client finished esni test.")
return fitness * 4
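# A minimal invocation sketch for ESNIClient above (illustrative: the server
# address is a documentation placeholder, and a refused or filtered connection
# simply yields a negative fitness).
import logging

logging.basicConfig(level=logging.DEBUG)
_args = {"server": "203.0.113.5", "port": "443"}
_client = ESNIClient(_args)
print("fitness:", _client.run(_args, logging.getLogger("esni-test")))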
| [
[
[
126,
134
],
[
705,
713
]
],
[
[
142,
156
],
[
3340,
3342
]
],
[
[
164,
166
]
],
[
[
174,
180
],
[
194,
200
],
[
3187,
3193
],
[
3201,
3207
],
[
3217,
3223
],
[
3697,
3703
],
[
3845,
3851
]
],
[
[
188,
192
],
[
3371,
3375
],
[
3440,
3444
]
],
[
[
257,
269
],
[
289,
301
],
[
467,
479
],
[
657,
669
]
],
[
[
278,
288
]
]
] |
"""
Dump/export our own data to a local file.
Script is installed as `location_dump`.
"""
import argparse
import os
import os.path
import sys
from sqlalchemy import text
from ichnaea.db import (
configure_db,
db_worker_session,
)
from ichnaea.geocalc import bbox
from ichnaea.log import (
configure_logging,
LOGGER,
)
from ichnaea.models import (
BlueShard,
CellShard,
WifiShard,
)
from ichnaea import util
def where_area(lat, lon, radius):
# Construct a where clause based on a bounding box around the given
# center point.
if lat is None or lon is None or radius is None:
return None
max_lat, min_lat, max_lon, min_lon = bbox(lat, lon, radius)
return '`lat` <= %s and `lat` >= %s and `lon` <= %s and `lon` >= %s' % (
round(max_lat, 5), round(min_lat, 5),
round(max_lon, 5), round(min_lon, 5))
def dump_model(shard_model, session, fd, where=None):
fd.write(shard_model.export_header() + '\n')
for model in shard_model.shards().values():
LOGGER.info('Exporting table: %s', model.__tablename__)
stmt = model.export_stmt()
if where:
stmt = stmt.replace(' WHERE ', ' WHERE %s AND ' % where)
stmt = text(stmt)
min_key = ''
limit = 25000
while True:
rows = session.execute(
stmt.bindparams(
export_key=min_key,
limit=limit
)).fetchall()
if rows:
buf = '\n'.join([row.export_value for row in rows])
if buf:
buf += '\n'
fd.write(buf)
min_key = rows[-1].export_key
else:
break
def dump_file(datatype, session, filename,
lat=None, lon=None, radius=None):
model = {
'blue': BlueShard,
'cell': CellShard,
'wifi': WifiShard,
}
where = where_area(lat, lon, radius)
with util.gzip_open(filename, 'w') as fd:
dump_model(model[datatype], session, fd, where=where)
return 0
def main(argv, _db=None, _dump_file=dump_file):
parser = argparse.ArgumentParser(
prog=argv[0], description='Dump/export data.')
parser.add_argument('--datatype', required=True,
help='Type of the data file, blue, cell or wifi')
parser.add_argument('--filename', required=True,
help='Path to the csv.gz export file.')
parser.add_argument('--lat', default=None,
help='The center latitude of the desired area.')
parser.add_argument('--lon', default=None,
help='The center longitude of the desired area.')
parser.add_argument('--radius', default=None,
help='The radius of the desired area.')
args = parser.parse_args(argv[1:])
if not args.filename: # pragma: no cover
parser.print_help()
return 1
filename = os.path.abspath(os.path.expanduser(args.filename))
if os.path.isfile(filename): # pragma: no cover
print('File already exists.')
return 1
datatype = args.datatype
if datatype not in ('blue', 'cell', 'wifi'): # pragma: no cover
print('Unknown data type.')
return 1
lat, lon, radius = (None, None, None)
if (args.lat is not None and
args.lon is not None and args.radius is not None):
lat = float(args.lat)
lon = float(args.lon)
radius = int(args.radius)
configure_logging()
db = configure_db('ro', transport='sync', _db=_db)
with db_worker_session(db, commit=False) as session:
exit_code = _dump_file(
datatype, session, filename, lat=lat, lon=lon, radius=radius)
return exit_code
def console_entry(): # pragma: no cover
sys.exit(main(sys.argv))
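# A minimal invocation sketch for main() above (illustrative paths; running it
# for real requires a configured read-only ichnaea database, since main()
# always calls configure_db).
exit_code = main([
    'location_dump',
    '--datatype', 'wifi',
    '--filename', '/tmp/wifi_export.csv.gz',
    '--lat', '51.5', '--lon', '-0.12', '--radius', '1000',
])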
| [
[
[
99,
107
],
[
2153,
2161
]
],
[
[
115,
117
]
],
[
[
125,
132
],
[
2979,
2981
],
[
2995,
2997
],
[
3037,
3039
]
],
[
[
140,
143
],
[
3835,
3838
],
[
3849,
3852
]
],
[
[
168,
172
],
[
1228,
1232
]
],
[
[
203,
215
],
[
3558,
3570
]
],
[
[
221,
238
],
[
3613,
3630
]
],
[
[
270,
274
],
[
681,
685
]
],
[
[
305,
322
],
[
3528,
3545
]
],
[
[
328,
334
],
[
1035,
1041
]
],
[
[
371,
380
],
[
1857,
1866
]
],
[
[
386,
395
],
[
1884,
1893
]
],
[
[
401,
410
],
[
1911,
1920
]
],
[
[
434,
438
],
[
1978,
1982
]
],
[
[
445,
455
],
[
1940,
1950
]
],
[
[
880,
890
],
[
2023,
2033
]
],
[
[
1740,
1749
],
[
2128,
2137
]
],
[
[
2096,
2100
],
[
3844,
3848
]
],
[
[
3794,
3807
]
]
] |
#!/usr/bin/python
import serial
import time
ser = serial.Serial(
    port='/dev/ttyACM1',
    baudrate=9600,
    parity=serial.PARITY_NONE,
    stopbits=serial.STOPBITS_ONE,
    bytesize=serial.EIGHTBITS
)
while True:
ser.flush()
line = ser.readline().decode().strip()
gas, fire = line.split(",")
print("gas-level: ", gas)
print("fire-level: ", fire)
time.sleep(1)
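# A slightly more defensive variant of the read loop above (illustrative;
# shown for reference only -- the loop above never exits, so this code is
# not reached as written). It skips partial reads that lack the separator.
while True:
    raw = ser.readline().decode(errors="ignore").strip()
    if "," not in raw:
        continue  # skip empty or incomplete reads
    gas_level, fire_level = raw.split(",", 1)
    print("gas-level:", gas_level)
    print("fire-level:", fire_level)
    time.sleep(1)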
| [
[
[
26,
32
],
[
53,
59
],
[
142,
148
],
[
181,
187
],
[
221,
227
]
],
[
[
40,
44
],
[
411,
415
]
],
[
[
47,
50
],
[
258,
261
],
[
281,
284
]
],
[
[
274,
278
],
[
329,
333
]
],
[
[
317,
320
],
[
370,
373
]
],
[
[
322,
326
],
[
401,
405
]
]
] |
"""
Convert an RDF graph into an image for displaying in the notebook, via GraphViz
It has two parts:
- conversion from rdf into dot language. Code based in rdflib.utils.rdf2dot
- rendering of the dot graph into an image. Code based on
ipython-hierarchymagic, which in turn bases it from Sphinx
See https://github.com/tkf/ipython-hierarchymagic
License for RDFLIB
------------------
Copyright (c) 2002-2015, RDFLib Team
See CONTRIBUTORS and http://github.com/RDFLib/rdflib
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Daniel Krech nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
License for ipython-hierarchymagic
----------------------------------
ipython-hierarchymagic is licensed under the term of the Simplified
BSD License (BSD 2-clause license), as follows:
Copyright (c) 2012 Takafumi Arakaki
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
License for Sphinx
------------------
`run_dot` function and `HierarchyMagic._class_name` method in this
extension heavily based on Sphinx code `sphinx.ext.graphviz.render_dot`
and `InheritanceGraph.class_name`.
Copyright notice for Sphinx can be found below.
Copyright (c) 2007-2011 by the Sphinx team (see AUTHORS file).
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import errno
import base64
import re
from io import StringIO
import rdflib
from .utils import escape
import logging
LOG = logging.getLogger(__name__)
# ------------------------------------------------------------------------
LABEL_PROPERTIES = [
rdflib.RDFS.label,
rdflib.URIRef('http://schema.org/name'),
rdflib.URIRef('http://www.w3.org/2000/01/rdf-schema#label'),
rdflib.URIRef('http://www.w3.org/2004/02/skos/core#prefLabel'),
rdflib.URIRef("http://purl.org/dc/elements/1.1/title"),
rdflib.URIRef("http://xmlns.com/foaf/0.1/name"),
rdflib.URIRef("http://www.w3.org/2006/vcard/ns#fn"),
rdflib.URIRef("http://www.w3.org/2006/vcard/ns#org"),
]
def label(x, gr, preferred_languages=None):
'''
@param x: graph entity
@param gr (Graph): RDF graph
@param preferred_languages (iterable): list of preferred language codes for
the labels.
Return the best available label in the graph for the passed entity.
If a set of preferred languages is given, try them in order. If none is
found, an arbitrary language will be chosen
'''
# Find all labels & their language
labels = {l.language: l
for labelProp in LABEL_PROPERTIES
for l in gr.objects(x, labelProp)}
#LOG.debug("LABELS %s %s", labels, preferred_languages)
#return repr(preferred_languages) + repr(labels)
if labels:
# Search the preferred language
if preferred_languages is not None:
for l in preferred_languages:
if l in labels:
return labels[l]
# If not found, return an arbitrary language
return labels.popitem()[1]
# No labels available. Try to generate a QNAME, or else, the string itself
try:
return gr.namespace_manager.compute_qname(x)[2].replace('_', ' ')
except Exception:
# Attempt to extract the trailing part of an URI
m = re.search('([^/]+)$', x)
return m.group(1).replace('_', ' ') if m else x
def rdf2dot(g, stream, opts={}):
'''
Convert the RDF graph to DOT
Write the dot output to the stream
'''
LOG.debug("RDF2DOT: %s", opts)
accept_lang = opts.get('lang', [])
do_literal = opts.get('literal')
nodes = {}
def node_id(x):
if x not in nodes:
nodes[x] = "node%d" % len(nodes)
return nodes[x]
def qname(x, g):
try:
q = g.compute_qname(x)
return q[0] + ":" + q[2]
except Exception:
return x
def accept(node):
if isinstance(node, (rdflib.URIRef, rdflib.BNode)):
return True
if not do_literal:
return False
return (not accept_lang) or (node.language in accept_lang)
stream.write(u'digraph { \n node [ fontname="DejaVu Sans,Tahoma,Geneva,sans-serif" ] ; \n')
# Write all edges. In the process make a list of all nodes
for s, p, o in g:
# skip triples for labels
if p == rdflib.RDFS.label:
continue
# Create a link if both objects are graph nodes
# (or, if literals are also included, if their languages match)
if not (accept(s) and accept(o)):
continue
# add the nodes to the list
sn = node_id(s)
on = node_id(o)
# add the link
q = qname(p, g)
if isinstance(p, rdflib.URIRef):
opstr = u'\t%s -> %s [ arrowhead="open", color="#9FC9E560", fontsize=9, fontcolor="#204080", label="%s", href="%s", target="_other" ] ;\n' % (sn, on, q, p)
else:
opstr = u'\t%s -> %s [ arrowhead="open", color="#9FC9E560", fontsize=9, fontcolor="#204080", label="%s" ] ;\n' % (sn, on, q)
stream.write(opstr)
# Write all nodes
for u, n in nodes.items():
lbl = escape(label(u, g, accept_lang), True)
if isinstance(u, rdflib.URIRef):
opstr = u'%s [ shape=none, fontsize=10, fontcolor=%s, label="%s", href="%s", target=_other ] \n' % (n, 'blue', lbl, u)
else:
opstr = u'%s [ shape=none, fontsize=10, fontcolor=%s, label="%s" ] \n' % (n, 'black', lbl)
stream.write(u"# %s %s\n" % (u, n))
stream.write(opstr)
stream.write(u'}\n')
# ------------------------------------------------------------------------
EPIPE = getattr(errno, 'EPIPE', 0)
EINVAL = getattr(errno, 'EINVAL', 0)
def run_dot(code, fmt='svg', gv_options=[], **kwargs):
'''
Run GraphViz on the buffer holding the graph
'''
LOG.debug("rundot fmt=%s options=%s", fmt, gv_options)
# mostly copied from sphinx.ext.graphviz.render_dot
import os
from subprocess import Popen, PIPE
dot_args = [kwargs.get('prg', 'dot')] + gv_options + ['-T', fmt]
if os.name == 'nt':
# Avoid opening shell window.
# * https://github.com/tkf/ipython-hierarchymagic/issues/1
# * http://stackoverflow.com/a/2935727/727827
p = Popen(dot_args, stdout=PIPE, stdin=PIPE, stderr=PIPE,
creationflags=0x08000000)
else:
p = Popen(dot_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
wentwrong = False
try:
# Graphviz may close standard input when an error occurs,
# resulting in a broken pipe on communicate()
stdout, stderr = p.communicate(code.encode('utf-8'))
except OSError as err:
if err.errno != EPIPE:
raise
wentwrong = True
except IOError as err:
if err.errno != EINVAL:
raise
wentwrong = True
if wentwrong:
# in this case, read the standard output and standard error streams
# directly, to get the error message(s)
stdout, stderr = p.stdout.read(), p.stderr.read()
p.wait()
if p.returncode != 0:
raise RuntimeError(u'dot exited with error:\n[stderr]\n{0}'
.format(stderr.decode('utf-8')))
return stdout
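# A minimal sketch of calling run_dot directly (assumes GraphViz's `dot` binary
# is installed and on PATH); the rendered output is returned as raw bytes:
#   svg_bytes = run_dot(u'digraph { a -> b }', fmt='svg')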
# ------------------------------------------------------------------------
def draw_graph(g, fmt='svg', prg='dot', options={}):
'''
Draw an RDF graph as an image
'''
# Convert RDF to Graphviz
buf = StringIO()
rdf2dot(g, buf, options)
gv_options = options.get('graphviz', [])
if fmt == 'png':
        # Avoid mutating the caller's options['graphviz'] list in place
        gv_options = gv_options + ['-Gdpi=220', '-Gsize=25,10!']
metadata = {"width": 5500, "height": 2200, "unconfined": True}
#import codecs
#with codecs.open('/tmp/sparqlkernel-img.dot','w',encoding='utf-8') as f:
# f.write( buf.getvalue() )
# Now use Graphviz to generate the graph
    image = run_dot(buf.getvalue(), fmt=fmt, gv_options=gv_options, prg=prg)
#with open('/tmp/sparqlkernel-img.'+fmt,'w') as f:
# f.write( image )
# Return it
if fmt == 'png':
return {'image/png': base64.b64encode(image).decode('ascii')}, \
{'image/png': metadata}
elif fmt == 'svg':
img = image.decode('utf-8').replace('<svg', '<svg class="unconfined"', 1)
return {'image/svg+xml': img}, \
{'unconfined': True}
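# A minimal usage sketch (hypothetical rdflib Graph `g`); draw_graph returns
# Jupyter-style (data, metadata) bundles keyed by MIME type:
#   data, metadata = draw_graph(g, fmt='svg', options={'lang': ['en'], 'literal': True})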
| [
[
[
4998,
5003
],
[
9334,
9339
],
[
9370,
9375
]
],
[
[
5011,
5017
],
[
11778,
11784
]
],
[
[
5025,
5027
],
[
6929,
6931
]
],
[
[
5043,
5051
],
[
11145,
11153
]
],
[
[
5060,
5066
],
[
5246,
5252
],
[
5269,
5275
],
[
5314,
5320
],
[
5379,
5385
],
[
5447,
5453
],
[
5507,
5513
],
[
5560,
5566
],
[
5617,
5623
],
[
7992,
7998
],
[
8382,
8388
],
[
8877,
8883
],
[
7584,
7590
],
[
7599,
7605
]
],
[
[
5087,
5093
],
[
8813,
8819
]
],
[
[
5102,
5109
],
[
5116,
5123
]
],
[
[
5110,
5113
],
[
7138,
7141
],
[
9516,
9519
]
],
[
[
5221,
5237
],
[
6198,
6214
]
],
[
[
5681,
5686
],
[
8820,
8825
]
],
[
[
7017,
7024
],
[
11160,
11167
]
],
[
[
9317,
9322
],
[
10382,
10387
]
],
[
[
9353,
9359
],
[
10483,
10489
]
],
[
[
9396,
9403
],
[
11567,
11574
]
],
[
[
11006,
11016
]
]
] |
"""
LUME-Genesis primary class
"""
from genesis import archive, lattice, parsers, tools, writers
import h5py
import tempfile
from time import time
import shutil
import os
from copy import deepcopy
def find_genesis2_executable(genesis_exe=None, verbose=False):
"""
Searches for the genesis2 executable.
"""
if genesis_exe:
exe = tools.full_path(genesis_exe)
if os.path.exists(exe):
if verbose:
print(f'Using user provided executable: {exe}')
return exe
else:
raise ValueError(f'Genesis executable does not exist: {exe}')
for exe in [tools.full_path('$GENESIS_BIN'), shutil.which('genesis2')]:
if os.path.exists(exe):
if verbose:
print(f'Using found executable: {exe}')
return exe
    raise ValueError('No Genesis executable found')
class Genesis:
"""
LUME-Genesis class to parse input, run genesis, and parse output.
By default, a temporary directory is created for working.
"""
def __init__(self, input_file=None,
genesis_bin=None,
use_tempdir=True,
workdir=None,
verbose=False
):
# Save init
self.original_input_file = input_file
self.use_tempdir = use_tempdir
self.workdir = workdir
if workdir:
assert os.path.exists(workdir), 'workdir does not exist: '+workdir
        self.verbose = verbose
        self.genesis_bin = find_genesis2_executable(genesis_bin, verbose=verbose)
        self.binary_prefixes = []  # For example, ['mpirun', '-n', '2']
        self.output = {}
        self.timeout = None
        # Run control
        self.finished = False
self.configured = False
if input_file:
self.load_input(input_file)
self.configure()
else:
            self.vprint('Warning: No input file given. Not configured. Please call .load_input(input_file) and .configure()')
def configure(self):
self.configure_genesis(workdir=self.workdir)
def configure_genesis(self, input_filePath=None, workdir=None):
"""
Configures working directory.
"""
if input_filePath:
self.load_input(input_filePath)
# Set paths
if self.use_tempdir:
# Need to attach this to the object. Otherwise it will go out of scope.
self.tempdir = tempfile.TemporaryDirectory(dir=self.workdir)
self.path = self.tempdir.name
else:
if workdir:
self.path = workdir
self.tempdir = None
else:
# Work in place
self.path = self.original_path
# Make full path
self.input_file = os.path.join(self.path, 'genesis.in')
self.vprint('Configured to run in:', self.path)
self.configured = True
# Conveniences
@property
def beam(self):
return self.input['beam']
@property
def lattice(self):
try:
return self.input['lattice']
        except KeyError:
print('No lattice found, assuming lattice is defined in input file.')
return None
@property
def param(self):
return self.input['param']
def load_input(self, filePath):
"""
Loads existing input file, with lattice
"""
assert os.path.exists(filePath), f'Input file does not exist: {filePath}'
f = tools.full_path(filePath)
self.original_path, self.input_file = os.path.split(f) # Get original path, name of main input
self.input = {
'beam':None
}
d = self.input
main = parsers.parse_main_inputfile(filePath)
d['param'] = main
if main['beamfile'] != '':
fname = main['beamfile']
            d['beam'] = parsers.parse_beam_file(fname, verbose=self.verbose)
# Use this new name
main['beamfile'] = parsers.POSSIBLE_INPUT_FILES['beamfile']
else:
d['beam'] = None
if main['maginfile'] != '':
self.load_lattice(filePath=main['maginfile'], verbose=self.verbose)
# Use this new name
main['maginfile'] = parsers.POSSIBLE_INPUT_FILES['maginfile']
else:
main['lattice'] = None
def load_output(self, filePath=None):
if not filePath:
fname = os.path.join(self.path, self.param['outputfile'])
else:
fname = filePath
if os.path.exists(fname):
self.output.update(parsers.parse_genesis_out(fname))
self.vprint('Loaded output:', fname)
# Final field
dflfile = fname+'.dfl'
if os.path.exists(dflfile):
self.output['data']['dfl'] = parsers.parse_genesis_dfl(dflfile, self.param['ncar'])
self.vprint('Loaded dfl:', dflfile)
# Field history
fldfile = fname+'.fld'
if os.path.exists(fldfile):
# Time independent is just one slice
if self['itdp'] == 0:
nslice = 1
else:
nslice = self.param['nslice']
self.output['data']['fld'] = parsers.parse_genesis_fld(fldfile, self.param['ncar'], nslice)
self.vprint('Loaded fld:', fldfile)
# Final particles
dpafile = fname+'.dpa'
if os.path.exists(dpafile):
self.output['data']['dpa'] = parsers.parse_genesis_dpa(dpafile, self.param['npart'])
self.vprint('Loaded dpa:', dpafile)
# Particle history
parfile = fname+'.par'
if os.path.exists(parfile):
self.output['data']['par'] = parsers.parse_genesis_dpa(parfile, self.param['npart'])
self.vprint('Loaded par:', parfile)
#
def load_lattice(self, filePath=None, verbose=False):
"""
loads an original Genesis-style lattice into a standard_lattice
"""
if not filePath:
fname = os.path.join(self.path, self.param['maginfile'])
else:
fname = filePath
self.vprint('loading lattice: ', fname)
lat = parsers.parse_genesis_lattice(fname)
# Standardize
lat['eles'] = lattice.standard_eles_from_eles(lat['eles'])
self.input['lattice'] = lat
def write_beam(self, filePath=None):
if not self.beam:
return
if not filePath:
filePath = os.path.join(self.path, self.param['beamfile'])
writers.write_beam_file(filePath, self.beam, verbose=self.verbose)
def write_input(self):
"""
Writes all input files
"""
self.write_input_file()
self.write_beam()
self.write_lattice()
# Write the run script
self.get_run_script()
def write_input_file(self):
"""
Write parameters to main .in file
"""
lines = tools.namelist_lines(self.param, start='$newrun', end='$end')
with open(self.input_file, 'w') as f:
for line in lines:
f.write(line+'\n')
def write_lattice(self):
if not self.lattice:
self.input['lattice'] = None
else:
            filePath = os.path.join(self.path, self.param['maginfile'])
            lattice.write_lattice(filePath, self.lattice)
self.vprint('Lattice written:', filePath)
def write_wavefront(self, h5=None):
"""
Write an openPMD wavefront from the dfl
"""
if not h5:
h5 = 'genesis_wavefront_'+self.fingerprint()+'.h5'
if isinstance(h5, str):
fname = os.path.expandvars(h5)
g = h5py.File(fname, 'w')
self.vprint(f'Writing wavefront (dfl data) to file {fname}')
else:
g = h5
dfl = self.output['data']['dfl']
param = self.output['param']
writers.write_openpmd_wavefront_h5(g, dfl=dfl, param=param)
return h5
def get_run_script(self, write_to_path=True):
"""
Assembles the run script. Optionally writes a file 'run' with this line to path.
"""
_, infile = os.path.split(self.input_file)
runscript = [self.genesis_bin, infile]
# Allow for MPI commands
if len(self.binary_prefixes) > 0:
runscript = self.binary_prefixes + runscript
if write_to_path:
filename = os.path.join(self.path, 'run')
with open(filename, 'w') as f:
f.write(' '.join(runscript))
tools.make_executable(filename)
return runscript
def run(self):
if not self.configured:
print('not configured to run')
return
self.run_genesis(verbose=self.verbose, timeout=self.timeout)
def run_genesis(self, verbose=False, parse_output=True, timeout=None):
# Check that binary exists
self.genesis_bin = tools.full_path(self.genesis_bin)
assert os.path.exists(self.genesis_bin), 'Genesis binary does not exist: '+ self.genesis_bin
# Clear old output
self.output = {}
run_info = self.output['run_info'] = {}
t1 = time()
run_info['start_time'] = t1
# Move to local directory
# Save init dir
init_dir = os.getcwd()
self.vprint('init dir: ', init_dir)
os.chdir(self.path)
# Debugging
self.vprint('Running genesis in '+os.getcwd())
# Write all input
self.write_input()
runscript = self.get_run_script()
run_info['run_script'] = ' '.join(runscript)
try:
if timeout:
res = tools.execute2(runscript, timeout=timeout)
log = res['log']
self.error = res['error']
run_info['why_error'] = res['why_error']
else:
# Interactive output, for Jupyter
log = []
for path in tools.execute(runscript):
self.vprint(path, end="")
log.append(path)
self.log = log
self.error = False
if parse_output:
self.load_output()
except Exception as ex:
print('Run Aborted', ex)
self.error = True
run_info['why_error'] = str(ex)
finally:
run_info['run_time'] = time() - t1
run_info['run_error'] = self.error
# Return to init_dir
os.chdir(init_dir)
self.finished = True
def fingerprint(self):
"""
Data fingerprint using the input.
"""
return tools.fingerprint(self.input)
def vprint(self, *args, **kwargs):
# Verbose print
if self.verbose:
print(*args, **kwargs)
def input_twiss(self):
betax = self['rxbeam']**2 * self['gamma0'] / self['emitx']
betay = self['rybeam']**2 * self['gamma0'] / self['emity']
alphax = self['alphax']
alphay = self['alphay']
return {'betax':betax, 'betay':betay, 'alphax':alphax, 'alphay':alphay}
def archive(self, h5=None):
"""
Archive all data to an h5 handle or filename.
If no file is given, a file based on the fingerprint will be created.
"""
if not h5:
h5 = 'genesis_'+self.fingerprint()+'.h5'
if isinstance(h5, str):
fname = os.path.expandvars(h5)
g = h5py.File(fname, 'w')
self.vprint(f'Archiving to file {fname}')
else:
g = h5
# Write basic attributes
archive.genesis_init(g)
# All input
archive.write_input_h5(g, self.input, name='input')
# All output
archive.write_output_h5(g, self.output, name='output', verbose=self.verbose)
return h5
def load_archive(self, h5, configure=True):
"""
Loads input and output from archived h5 file.
See: Genesis.archive
"""
if isinstance(h5, str):
fname = os.path.expandvars(h5)
g = h5py.File(fname, 'r')
glist = archive.find_genesis_archives(g)
n = len(glist)
if n == 0:
# legacy: try top level
message = 'legacy'
elif n == 1:
gname = glist[0]
message = f'group {gname} from'
g = g[gname]
else:
raise ValueError(f'Multiple archives found in file {fname}: {glist}')
self.vprint(f'Reading {message} archive file {h5}')
else:
g = h5
self.input = archive.read_input_h5(g['input'])
self.output = archive.read_output_h5(g['output'], verbose=self.verbose)
self.vprint('Loaded from archive. Note: Must reconfigure to run again.')
self.configured = False
if configure:
self.configure()
def copy(self):
"""
Returns a deep copy of this object.
If a tempdir is being used, will clear this and deconfigure.
"""
G2 = deepcopy(self)
# Clear this
if G2.use_tempdir:
G2.path = None
G2.configured = False
return G2
def __getitem__(self, key):
"""
Convenience syntax to get an attribute
See: __setitem__
"""
if key in self.param:
return self.param[key]
raise ValueError(f'{key} does not exist in input param')
def __setitem__(self, key, item):
"""
Convenience syntax to set input parameters
Example:
G['ncar'] = 251
"""
if key in self.param:
self.param[key] = item
else:
raise ValueError(f'{key} does not exist in input param')
def __str__(self):
path = self.path
s = ''
if self.finished:
s += 'Genesis finished in '+path
elif self.configured:
s += 'Genesis configured in '+path
else:
s += 'Genesis not configured.'
return s
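# A minimal usage sketch (hypothetical 'genesis.in' input file; assumes a
# genesis2 executable is discoverable via $GENESIS_BIN or on PATH):
#   G = Genesis('genesis.in')   # parse input and configure a temporary workdir
#   G['ncar'] = 251             # adjust a parameter via __setitem__
#   G.run()                     # write input, run genesis2, parse the output
#   G.archive('run.h5')         # save all input/output to HDF5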
| [
[
[
59,
66
],
[
12820,
12827
],
[
12909,
12916
],
[
12991,
12998
],
[
13412,
13419
],
[
13950,
13957
],
[
14006,
14013
]
],
[
[
68,
75
],
[
6823,
6830
],
[
8062,
8069
]
],
[
[
77,
84
],
[
4044,
4051
],
[
4214,
4221
],
[
4356,
4363
],
[
4642,
4649
],
[
4982,
4989
],
[
5231,
5238
],
[
5653,
5660
],
[
5927,
5934
],
[
6191,
6198
],
[
6742,
6749
]
],
[
[
86,
91
],
[
345,
350
],
[
637,
642
],
[
3794,
3799
],
[
7601,
7606
],
[
9436,
9441
],
[
9870,
9875
],
[
10652,
10657
],
[
10952,
10957
],
[
11742,
11747
]
],
[
[
93,
100
],
[
7136,
7143
],
[
8723,
8730
]
],
[
[
110,
114
],
[
8492,
8496
],
[
12657,
12661
],
[
13357,
13361
]
],
[
[
122,
130
],
[
2671,
2679
]
],
[
[
148,
152
],
[
10136,
10140
],
[
11417,
11421
]
],
[
[
160,
166
],
[
670,
676
]
],
[
[
174,
176
],
[
385,
387
],
[
708,
710
],
[
1459,
1461
],
[
3029,
3031
],
[
3706,
3708
],
[
3866,
3868
],
[
4824,
4826
],
[
4928,
4930
],
[
5165,
5167
],
[
5413,
5415
],
[
5861,
5863
],
[
6125,
6127
],
[
6562,
6564
],
[
7071,
7073
],
[
7947,
7949
],
[
8453,
8455
],
[
9023,
9025
],
[
9305,
9307
],
[
9919,
9921
],
[
10258,
10260
],
[
10331,
10333
],
[
10413,
10415
],
[
11534,
11536
],
[
12618,
12620
],
[
13318,
13320
]
],
[
[
184,
208
],
[
1595,
1619
]
],
[
[
902,
909
]
]
] |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
from telemetry.core import util
util.AddDirToPythonPath(
util.GetTelemetryDir(), 'third_party', 'websocket-client')
from websocket import create_connection # pylint: disable=W0611
from websocket import WebSocketException # pylint: disable=W0611
from websocket import WebSocketTimeoutException # pylint: disable=W0611
| [
[
[
185,
200
]
],
[
[
229,
233
],
[
235,
239
],
[
264,
268
]
],
[
[
345,
362
]
],
[
[
410,
428
]
],
[
[
476,
501
]
]
] |
# -*- coding: utf-8 -*-
import pytest
from wemake_python_styleguide.violations.best_practices import (
BaseExceptionViolation,
)
from wemake_python_styleguide.visitors.ast.keywords import (
WrongExceptionTypeVisitor,
)
use_base_exception = """
try:
execute()
except BaseException:
raise
"""
use_except_exception = """
try:
1 / 0
except Exception:
raise
"""
use_bare_except = """
try:
1 / 0
except:
raise
"""
@pytest.mark.parametrize('code', [
use_base_exception,
])
def test_use_base_exception(
assert_errors, parse_ast_tree, code, default_options,
):
"""Testing that `except BaseException:` is restricted."""
tree = parse_ast_tree(code)
visitor = WrongExceptionTypeVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [BaseExceptionViolation])
@pytest.mark.parametrize('code', [
use_except_exception,
use_bare_except,
])
def test_use_exception(
assert_errors, parse_ast_tree, code, default_options,
):
"""Testing that `except Exception:` and `except:` are allowed."""
tree = parse_ast_tree(code)
visitor = WrongExceptionTypeVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [])
| [
[
[
32,
38
],
[
448,
454
],
[
837,
843
]
],
[
[
109,
131
],
[
809,
831
]
],
[
[
200,
225
],
[
708,
733
],
[
1123,
1148
]
],
[
[
230,
248
],
[
486,
504
]
],
[
[
311,
331
],
[
875,
895
]
],
[
[
386,
401
],
[
901,
916
]
],
[
[
513,
536
]
],
[
[
925,
943
]
]
] |
from .functional import *
| [
[
[
24,
25
]
]
] |
from . import ac
from . import q_learning
from . import rnnq_learning
AC = ac.ActorCritic
MFAC = ac.MFAC
IL = q_learning.DQN
MFQ = q_learning.MFQ
POMFQ = q_learning.POMFQ
rnnIL = rnnq_learning.DQN
rnnMFQ = rnnq_learning.MFQ
def spawn_ai(algo_name, sess, env, handle, human_name, max_steps):
if algo_name == 'mfq':
model = MFQ(sess, human_name, handle, env, max_steps, memory_size=80000)
elif algo_name == 'mfac':
model = MFAC(sess, human_name, handle, env)
elif algo_name == 'ac':
model = AC(sess, human_name, handle, env)
elif algo_name == 'il':
model = IL(sess, human_name, handle, env, max_steps, memory_size=80000)
elif algo_name == 'rnnIL':
model = rnnIL(sess, human_name, handle, env, max_steps, memory_size=80000)
elif algo_name == 'rnnMFQ':
model = rnnMFQ(sess, human_name, handle, env, max_steps, memory_size=80000)
    elif algo_name == 'pomfq':
        model = POMFQ(sess, human_name, handle, env, max_steps, memory_size=80000)
    else:
        raise ValueError('Unknown algorithm name: {}'.format(algo_name))
    return model
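# A minimal usage sketch (hypothetical TensorFlow session, MAgent-style env and
# group handle supplied by the surrounding training script):
#   model = spawn_ai('mfq', sess, env, handles[0], 'predator', max_steps=400)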
| [
[
[
14,
16
],
[
75,
77
],
[
97,
99
]
],
[
[
31,
41
],
[
110,
120
],
[
131,
141
],
[
154,
164
]
],
[
[
56,
69
],
[
179,
192
],
[
206,
219
]
],
[
[
70,
72
],
[
526,
528
]
],
[
[
90,
94
],
[
446,
450
]
],
[
[
105,
107
],
[
604,
606
]
],
[
[
125,
128
],
[
335,
338
]
],
[
[
146,
151
],
[
946,
951
]
],
[
[
171,
176
],
[
715,
720
]
],
[
[
197,
203
],
[
831,
837
]
],
[
[
229,
237
]
]
] |
from lib.types import IStdin, IStdout
def main(stdin: IStdin, stdout: IStdout):
    stdout.write('*** You are a student at PWN_University and you are all set to graduate at the end of the semester. Unfortunately the night before graduation you learned you were going to fail your last class and now you\'re afraid the school won\'t let you graduate. Luckily you have a friend in IT and after hearing of your situation he casually sends you a message with the IP address for one of the school\'s secure servers. Your goal is to hack into the server and figure out a way to change your grade! ***\n')
stdout.write('\n')
    stdout.write('You are requesting access to an official PWN_University server. Only authorised individuals are allowed further.\n')
stdout.write('\n')
    stdout.write('*** You remember one of your IT friends who works for the university keeps their username encoded on their desk in case they forget the spelling. So you go to their desk and find out it\'s MTMzN3VzZXI= ***\n')
stdout.write('\n')
stdout.write('Enter your username: ')
stdout.flush()
username = stdin.readline().strip('\n')
if username == '1337user':
stdout.write('\n')
stdout.write('*** You then remember there was a data breach of all university passwords. Luckily PWN_University does not store their passwords in plain text, but rather in MD5 hashes. You navigate to the one associated with your friends username and it is 90f2c9c53f66540e67349e0ab83d8cd0 ***\n')
stdout.write('\n')
stdout.write('Now please enter your password: ')
stdout.flush()
password = stdin.readline().strip('\n')
if password == 'p@ssword':
stdout.write('Login Successful!\n')
stdout.write('\n')
            stdout.write('*** Now that you have logged into the server you remember your IT friend implying that the database of grades is a MySQL database. Maybe you should try changing directories to where that is commonly stored (please use the full path) ***\n')
stdout.write('\n')
stdout.write('~$ ')
stdout.flush()
path = stdin.readline().strip('\n')
if path == 'cd /var/lib/mysql':
stdout.write('\n')
                stdout.write('*** Wow, it looks like you\'re getting close: you are now in the mysql directory. You run some SQL queries on the grades database and are able to select the string that says \'PWNER1337 has a F\'. All you have to do is replace F with an A (type in the SQL command to do this below) ***\n')
stdout.write('\n')
stdout.write('mysql> ')
stdout.flush()
sql = stdin.readline().strip('\n')
#if sql == 'REPLACE(\'PWNER1337 has a F\', \'F\', \'A\');':
if 'REPLACE' in sql and 'PWNER1337' in sql and 'F' in sql and 'A' in sql:
stdout.write('\n')
                    stdout.write('*** Congratulations, you changed your grade from an F to an A. Unfortunately the university caught you in the act, but because you were able to hack PWN_University they decided to let you graduate after all! ***\n')
stdout.write('\n')
                    stdout.write('*** Present this flag to the challenge organizer to claim your prize! flag{CI_NETSEC_1ST_COMP}\n')
                else:
                    stdout.write('\n')
                    stdout.write('*** Oh no, looks like you entered the wrong SQL command. Maybe you should try reconnecting to the server and trying another answer... ***\n')
            else:
                stdout.write('\n')
                stdout.write('*** Oh no, looks like you entered the wrong path. Maybe you should try reconnecting to the server and trying another answer... ***\n')
        else:
            stdout.write('\n')
            stdout.write('That\'s not the correct password, access denied!\n')
            stdout.write('*** Oh no, looks like your access was denied. Maybe you should try reconnecting to the server and trying another answer... ***\n')
    else:
        stdout.write('\n')
        stdout.write('That\'s not a valid username, access denied!\n')
        stdout.write('*** Oh no, looks like your access was denied. Maybe you should try reconnecting to the server and trying another answer... ***\n')
| [
[
[
22,
28
],
[
55,
61
]
],
[
[
30,
37
],
[
71,
78
]
],
[
[
43,
47
]
]
] |
"""Constants for the ISY994 Platform."""
import logging
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_COLD,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_GAS,
DEVICE_CLASS_HEAT,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_PROBLEM,
DEVICE_CLASS_SAFETY,
DEVICE_CLASS_SMOKE,
DEVICE_CLASS_SOUND,
DEVICE_CLASS_VIBRATION,
DOMAIN as BINARY_SENSOR,
)
from homeassistant.components.climate.const import (
CURRENT_HVAC_COOL,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
DOMAIN as CLIMATE,
FAN_AUTO,
FAN_HIGH,
FAN_MEDIUM,
FAN_ON,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
)
from homeassistant.components.cover import DOMAIN as COVER
from homeassistant.components.fan import DOMAIN as FAN
from homeassistant.components.light import DOMAIN as LIGHT
from homeassistant.components.lock import DOMAIN as LOCK
from homeassistant.components.sensor import DOMAIN as SENSOR
from homeassistant.components.switch import DOMAIN as SWITCH
from homeassistant.const import (
CONCENTRATION_PARTS_PER_MILLION,
DEGREE,
ENERGY_KILO_WATT_HOUR,
FREQUENCY_HERTZ,
LENGTH_CENTIMETERS,
LENGTH_FEET,
LENGTH_INCHES,
LENGTH_KILOMETERS,
LENGTH_METERS,
LENGTH_MILES,
MASS_KILOGRAMS,
MASS_POUNDS,
POWER_WATT,
PRESSURE_INHG,
SERVICE_LOCK,
SERVICE_UNLOCK,
SPEED_KILOMETERS_PER_HOUR,
SPEED_METERS_PER_SECOND,
SPEED_MILES_PER_HOUR,
STATE_CLOSED,
STATE_CLOSING,
STATE_LOCKED,
STATE_OFF,
STATE_ON,
STATE_OPEN,
STATE_OPENING,
STATE_PROBLEM,
STATE_UNKNOWN,
STATE_UNLOCKED,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
TEMP_KELVIN,
TIME_DAYS,
TIME_HOURS,
TIME_MILLISECONDS,
TIME_MINUTES,
TIME_MONTHS,
TIME_SECONDS,
TIME_YEARS,
UNIT_PERCENTAGE,
UV_INDEX,
VOLT,
VOLUME_GALLONS,
VOLUME_LITERS,
)
_LOGGER = logging.getLogger(__package__)
DOMAIN = "isy994"
MANUFACTURER = "Universal Devices, Inc"
CONF_IGNORE_STRING = "ignore_string"
CONF_SENSOR_STRING = "sensor_string"
CONF_VAR_SENSOR_STRING = "variable_sensor_string"
CONF_TLS_VER = "tls"
CONF_RESTORE_LIGHT_STATE = "restore_light_state"
DEFAULT_IGNORE_STRING = "{IGNORE ME}"
DEFAULT_SENSOR_STRING = "sensor"
DEFAULT_RESTORE_LIGHT_STATE = False
DEFAULT_TLS_VERSION = 1.1
DEFAULT_PROGRAM_STRING = "HA."
DEFAULT_VAR_SENSOR_STRING = "HA."
KEY_ACTIONS = "actions"
KEY_STATUS = "status"
SUPPORTED_PLATFORMS = [BINARY_SENSOR, SENSOR, LOCK, FAN, COVER, LIGHT, SWITCH, CLIMATE]
SUPPORTED_PROGRAM_PLATFORMS = [BINARY_SENSOR, LOCK, FAN, COVER, SWITCH]
SUPPORTED_BIN_SENS_CLASSES = ["moisture", "opening", "motion", "climate"]
# ISY Scenes are more like Switches than Home Assistant Scenes
# (they can turn off, and report their state)
ISY_GROUP_PLATFORM = SWITCH
ISY994_ISY = "isy"
ISY994_NODES = "isy994_nodes"
ISY994_PROGRAMS = "isy994_programs"
ISY994_VARIABLES = "isy994_variables"
FILTER_UOM = "uom"
FILTER_STATES = "states"
FILTER_NODE_DEF_ID = "node_def_id"
FILTER_INSTEON_TYPE = "insteon_type"
FILTER_ZWAVE_CAT = "zwave_cat"
# Special Subnodes for some Insteon Devices
SUBNODE_CLIMATE_COOL = 2
SUBNODE_CLIMATE_HEAT = 3
SUBNODE_DUSK_DAWN = 2
SUBNODE_EZIO2X4_SENSORS = [9, 10, 11, 12]
SUBNODE_FANLINC_LIGHT = 1
SUBNODE_HEARTBEAT = 4
SUBNODE_IOLINC_RELAY = 2
SUBNODE_LOW_BATTERY = 3
SUBNODE_MOTION_DISABLED = (13, 19) # Int->13 or Hex->0xD depending on firmware
SUBNODE_NEGATIVE = 2
SUBNODE_TAMPER = (10, 16) # Int->10 or Hex->0xA depending on firmware
# Generic Insteon Type Categories for Filters
TYPE_CATEGORY_CONTROLLERS = "0."
TYPE_CATEGORY_DIMMABLE = "1."
TYPE_CATEGORY_SWITCHED = "2."
TYPE_CATEGORY_IRRIGATION = "4."
TYPE_CATEGORY_CLIMATE = "5."
TYPE_CATEGORY_POOL_CTL = "6."
TYPE_CATEGORY_SENSOR_ACTUATORS = "7."
TYPE_CATEGORY_ENERGY_MGMT = "9."
TYPE_CATEGORY_COVER = "14."
TYPE_CATEGORY_LOCK = "15."
TYPE_CATEGORY_SAFETY = "16."
TYPE_CATEGORY_X10 = "113."
TYPE_EZIO2X4 = "7.3.255."
TYPE_INSTEON_MOTION = ("16.1.", "16.22.")
UNDO_UPDATE_LISTENER = "undo_update_listener"
# Used for discovery
UDN_UUID_PREFIX = "uuid:"
ISY_URL_POSTFIX = "/desc"
# Do not use the Home Assistant consts for the states here - we're matching exact API
# responses, not using them for Home Assistant states
# Insteon Types: https://www.universal-devices.com/developers/wsdk/5.0.4/1_fam.xml
# Z-Wave Categories: https://www.universal-devices.com/developers/wsdk/5.0.4/4_fam.xml
NODE_FILTERS = {
BINARY_SENSOR: {
FILTER_UOM: [],
FILTER_STATES: [],
FILTER_NODE_DEF_ID: [
"BinaryAlarm",
"BinaryAlarm_ADV",
"BinaryControl",
"BinaryControl_ADV",
"EZIO2x4_Input",
"EZRAIN_Input",
"OnOffControl",
"OnOffControl_ADV",
],
FILTER_INSTEON_TYPE: [
"7.0.",
"7.13.",
TYPE_CATEGORY_SAFETY,
], # Does a startswith() match; include the dot
FILTER_ZWAVE_CAT: (["104", "112", "138"] + list(map(str, range(148, 180)))),
},
SENSOR: {
# This is just a more-readable way of including MOST uoms between 1-100
# (Remember that range() is non-inclusive of the stop value)
FILTER_UOM: (
["1"]
+ list(map(str, range(3, 11)))
+ list(map(str, range(12, 51)))
+ list(map(str, range(52, 66)))
+ list(map(str, range(69, 78)))
+ ["79"]
+ list(map(str, range(82, 97)))
),
FILTER_STATES: [],
FILTER_NODE_DEF_ID: [
"IMETER_SOLO",
"EZIO2x4_Input_ADV",
"KeypadButton",
"KeypadButton_ADV",
"RemoteLinc2",
"RemoteLinc2_ADV",
],
FILTER_INSTEON_TYPE: ["0.16.", "0.17.", "0.18.", "9.0.", "9.7."],
FILTER_ZWAVE_CAT: (["118", "143"] + list(map(str, range(180, 185)))),
},
LOCK: {
FILTER_UOM: ["11"],
FILTER_STATES: ["locked", "unlocked"],
FILTER_NODE_DEF_ID: ["DoorLock"],
FILTER_INSTEON_TYPE: [TYPE_CATEGORY_LOCK, "4.64."],
FILTER_ZWAVE_CAT: ["111"],
},
FAN: {
FILTER_UOM: [],
FILTER_STATES: ["off", "low", "med", "high"],
FILTER_NODE_DEF_ID: ["FanLincMotor"],
FILTER_INSTEON_TYPE: ["1.46."],
FILTER_ZWAVE_CAT: [],
},
COVER: {
FILTER_UOM: ["97"],
FILTER_STATES: ["open", "closed", "closing", "opening", "stopped"],
FILTER_NODE_DEF_ID: [],
FILTER_INSTEON_TYPE: [],
FILTER_ZWAVE_CAT: [],
},
LIGHT: {
FILTER_UOM: ["51"],
FILTER_STATES: ["on", "off", "%"],
FILTER_NODE_DEF_ID: [
"BallastRelayLampSwitch",
"BallastRelayLampSwitch_ADV",
"DimmerLampOnly",
"DimmerLampSwitch",
"DimmerLampSwitch_ADV",
"DimmerSwitchOnly",
"DimmerSwitchOnly_ADV",
"KeypadDimmer",
"KeypadDimmer_ADV",
],
FILTER_INSTEON_TYPE: [TYPE_CATEGORY_DIMMABLE],
FILTER_ZWAVE_CAT: ["109", "119"],
},
SWITCH: {
FILTER_UOM: ["2", "78"],
FILTER_STATES: ["on", "off"],
FILTER_NODE_DEF_ID: [
"AlertModuleArmed",
"AlertModuleSiren",
"AlertModuleSiren_ADV",
"EZIO2x4_Output",
"EZRAIN_Output",
"KeypadRelay",
"KeypadRelay_ADV",
"RelayLampOnly",
"RelayLampOnly_ADV",
"RelayLampSwitch",
"RelayLampSwitch_ADV",
"RelaySwitchOnlyPlusQuery",
"RelaySwitchOnlyPlusQuery_ADV",
"Siren",
"Siren_ADV",
"X10",
],
FILTER_INSTEON_TYPE: [
TYPE_CATEGORY_SWITCHED,
"7.3.255.",
"9.10.",
"9.11.",
TYPE_CATEGORY_X10,
],
FILTER_ZWAVE_CAT: ["121", "122", "123", "137", "141", "147"],
},
CLIMATE: {
FILTER_UOM: ["2"],
FILTER_STATES: ["heating", "cooling", "idle", "fan_only", "off"],
FILTER_NODE_DEF_ID: ["TempLinc", "Thermostat"],
FILTER_INSTEON_TYPE: ["4.8", TYPE_CATEGORY_CLIMATE],
FILTER_ZWAVE_CAT: ["140"],
},
}
UOM_ISYV4_DEGREES = "degrees"
UOM_ISYV4_NONE = "n/a"
UOM_ISY_CELSIUS = 1
UOM_ISY_FAHRENHEIT = 2
UOM_DOUBLE_TEMP = "101"
UOM_HVAC_ACTIONS = "66"
UOM_HVAC_MODE_GENERIC = "67"
UOM_HVAC_MODE_INSTEON = "98"
UOM_FAN_MODES = "99"
UOM_INDEX = "25"
UOM_ON_OFF = "2"
UOM_FRIENDLY_NAME = {
"1": "A",
"3": f"btu/{TIME_HOURS}",
"4": TEMP_CELSIUS,
"5": LENGTH_CENTIMETERS,
"6": "ft³",
"7": f"ft³/{TIME_MINUTES}",
"8": "m³",
"9": TIME_DAYS,
"10": TIME_DAYS,
"12": "dB",
"13": "dB A",
"14": DEGREE,
"16": "macroseismic",
"17": TEMP_FAHRENHEIT,
"18": LENGTH_FEET,
"19": TIME_HOURS,
"20": TIME_HOURS,
"21": "%AH",
"22": "%RH",
"23": PRESSURE_INHG,
"24": f"{LENGTH_INCHES}/{TIME_HOURS}",
UOM_INDEX: "index", # Index type. Use "node.formatted" for value
"26": TEMP_KELVIN,
"27": "keyword",
"28": MASS_KILOGRAMS,
"29": "kV",
"30": "kW",
"31": "kPa",
"32": SPEED_KILOMETERS_PER_HOUR,
"33": ENERGY_KILO_WATT_HOUR,
"34": "liedu",
"35": VOLUME_LITERS,
"36": "lx",
"37": "mercalli",
"38": LENGTH_METERS,
"39": f"{LENGTH_METERS}³/{TIME_HOURS}",
"40": SPEED_METERS_PER_SECOND,
"41": "mA",
"42": TIME_MILLISECONDS,
"43": "mV",
"44": TIME_MINUTES,
"45": TIME_MINUTES,
"46": f"mm/{TIME_HOURS}",
"47": TIME_MONTHS,
"48": SPEED_MILES_PER_HOUR,
"49": SPEED_METERS_PER_SECOND,
"50": "Ω",
"51": UNIT_PERCENTAGE,
"52": MASS_POUNDS,
"53": "pf",
"54": CONCENTRATION_PARTS_PER_MILLION,
"55": "pulse count",
"57": TIME_SECONDS,
"58": TIME_SECONDS,
"59": "S/m",
"60": "m_b",
"61": "M_L",
"62": "M_w",
"63": "M_S",
"64": "shindo",
"65": "SML",
"69": VOLUME_GALLONS,
"71": UV_INDEX,
"72": VOLT,
"73": POWER_WATT,
"74": f"{POWER_WATT}/{LENGTH_METERS}²",
"75": "weekday",
"76": DEGREE,
"77": TIME_YEARS,
"82": "mm",
"83": LENGTH_KILOMETERS,
"85": "Ω",
"86": "kΩ",
"87": f"{LENGTH_METERS}³/{LENGTH_METERS}³",
"88": "Water activity",
"89": "RPM",
"90": FREQUENCY_HERTZ,
"91": DEGREE,
"92": f"{DEGREE} South",
"100": "", # Range 0-255, no unit.
UOM_DOUBLE_TEMP: UOM_DOUBLE_TEMP,
"102": "kWs",
"103": "$",
"104": "¢",
"105": LENGTH_INCHES,
"106": f"mm/{TIME_DAYS}",
"107": "", # raw 1-byte unsigned value
"108": "", # raw 2-byte unsigned value
"109": "", # raw 3-byte unsigned value
"110": "", # raw 4-byte unsigned value
"111": "", # raw 1-byte signed value
"112": "", # raw 2-byte signed value
"113": "", # raw 3-byte signed value
"114": "", # raw 4-byte signed value
"116": LENGTH_MILES,
"117": "mbar",
"118": "hPa",
"119": f"{POWER_WATT}{TIME_HOURS}",
"120": f"{LENGTH_INCHES}/{TIME_DAYS}",
}
UOM_TO_STATES = {
"11": { # Deadbolt Status
0: STATE_UNLOCKED,
100: STATE_LOCKED,
101: STATE_UNKNOWN,
102: STATE_PROBLEM,
},
"15": { # Door Lock Alarm
1: "master code changed",
2: "tamper code entry limit",
3: "escutcheon removed",
4: "key/manually locked",
5: "locked by touch",
6: "key/manually unlocked",
7: "remote locking jammed bolt",
8: "remotely locked",
9: "remotely unlocked",
10: "deadbolt jammed",
11: "battery too low to operate",
12: "critical low battery",
13: "low battery",
14: "automatically locked",
15: "automatic locking jammed bolt",
16: "remotely power cycled",
17: "lock handling complete",
19: "user deleted",
20: "user added",
21: "duplicate pin",
22: "jammed bolt by locking with keypad",
23: "locked by keypad",
24: "unlocked by keypad",
25: "keypad attempt outside schedule",
26: "hardware failure",
27: "factory reset",
},
UOM_HVAC_ACTIONS: { # Thermostat Heat/Cool State
0: CURRENT_HVAC_IDLE,
1: CURRENT_HVAC_HEAT,
2: CURRENT_HVAC_COOL,
3: CURRENT_HVAC_FAN,
4: CURRENT_HVAC_HEAT, # Pending Heat
5: CURRENT_HVAC_COOL, # Pending Cool
        # >6 defined in ISY but not implemented, leaving for future expansion.
6: CURRENT_HVAC_IDLE,
7: CURRENT_HVAC_HEAT,
8: CURRENT_HVAC_HEAT,
9: CURRENT_HVAC_COOL,
10: CURRENT_HVAC_HEAT,
11: CURRENT_HVAC_HEAT,
},
UOM_HVAC_MODE_GENERIC: { # Thermostat Mode
0: HVAC_MODE_OFF,
1: HVAC_MODE_HEAT,
2: HVAC_MODE_COOL,
3: HVAC_MODE_AUTO,
4: PRESET_BOOST,
5: "resume",
6: HVAC_MODE_FAN_ONLY,
7: "furnace",
8: HVAC_MODE_DRY,
9: "moist air",
10: "auto changeover",
11: "energy save heat",
12: "energy save cool",
13: PRESET_AWAY,
14: HVAC_MODE_AUTO,
15: HVAC_MODE_AUTO,
16: HVAC_MODE_AUTO,
},
"68": { # Thermostat Fan Mode
0: FAN_AUTO,
1: FAN_ON,
2: FAN_HIGH, # Auto High
3: FAN_HIGH,
4: FAN_MEDIUM, # Auto Medium
5: FAN_MEDIUM,
6: "circulation",
7: "humidity circulation",
},
"78": {0: STATE_OFF, 100: STATE_ON}, # 0-Off 100-On
"79": {0: STATE_OPEN, 100: STATE_CLOSED}, # 0-Open 100-Close
"80": { # Thermostat Fan Run State
0: STATE_OFF,
1: STATE_ON,
2: "on high",
3: "on medium",
4: "circulation",
5: "humidity circulation",
6: "right/left circulation",
7: "up/down circulation",
8: "quiet circulation",
},
"84": {0: SERVICE_LOCK, 1: SERVICE_UNLOCK}, # Secure Mode
"93": { # Power Management Alarm
1: "power applied",
2: "ac mains disconnected",
3: "ac mains reconnected",
4: "surge detection",
5: "volt drop or drift",
6: "over current detected",
7: "over voltage detected",
8: "over load detected",
9: "load error",
10: "replace battery soon",
11: "replace battery now",
12: "battery is charging",
13: "battery is fully charged",
14: "charge battery soon",
15: "charge battery now",
},
"94": { # Appliance Alarm
1: "program started",
2: "program in progress",
3: "program completed",
4: "replace main filter",
5: "failure to set target temperature",
6: "supplying water",
7: "water supply failure",
8: "boiling",
9: "boiling failure",
10: "washing",
11: "washing failure",
12: "rinsing",
13: "rinsing failure",
14: "draining",
15: "draining failure",
16: "spinning",
17: "spinning failure",
18: "drying",
19: "drying failure",
20: "fan failure",
21: "compressor failure",
},
"95": { # Home Health Alarm
1: "leaving bed",
2: "sitting on bed",
3: "lying on bed",
4: "posture changed",
5: "sitting on edge of bed",
},
"96": { # VOC Level
1: "clean",
2: "slightly polluted",
3: "moderately polluted",
4: "highly polluted",
},
"97": { # Barrier Status
**{
0: STATE_CLOSED,
100: STATE_OPEN,
101: STATE_UNKNOWN,
102: "stopped",
103: STATE_CLOSING,
104: STATE_OPENING,
},
**{
b: f"{b} %" for a, b in enumerate(list(range(1, 100)))
}, # 1-99 are percentage open
},
UOM_HVAC_MODE_INSTEON: { # Insteon Thermostat Mode
0: HVAC_MODE_OFF,
1: HVAC_MODE_HEAT,
2: HVAC_MODE_COOL,
3: HVAC_MODE_HEAT_COOL,
4: HVAC_MODE_FAN_ONLY,
5: HVAC_MODE_AUTO, # Program Auto
6: HVAC_MODE_AUTO, # Program Heat-Set @ Local Device Only
7: HVAC_MODE_AUTO, # Program Cool-Set @ Local Device Only
},
UOM_FAN_MODES: {7: FAN_ON, 8: FAN_AUTO}, # Insteon Thermostat Fan Mode
"115": { # Most recent On style action taken for lamp control
0: "on",
1: "off",
2: "fade up",
3: "fade down",
4: "fade stop",
5: "fast on",
6: "fast off",
7: "triple press on",
8: "triple press off",
9: "4x press on",
10: "4x press off",
11: "5x press on",
12: "5x press off",
},
}
ISY_HVAC_MODES = [
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_AUTO,
HVAC_MODE_FAN_ONLY,
]
HA_HVAC_TO_ISY = {
HVAC_MODE_OFF: "off",
HVAC_MODE_HEAT: "heat",
HVAC_MODE_COOL: "cool",
HVAC_MODE_HEAT_COOL: "auto",
HVAC_MODE_FAN_ONLY: "fan_only",
HVAC_MODE_AUTO: "program_auto",
}
HA_FAN_TO_ISY = {FAN_ON: "on", FAN_AUTO: "auto"}
BINARY_SENSOR_DEVICE_TYPES_ISY = {
DEVICE_CLASS_MOISTURE: ["16.8.", "16.13.", "16.14."],
DEVICE_CLASS_OPENING: [
"16.9.",
"16.6.",
"16.7.",
"16.2.",
"16.17.",
"16.20.",
"16.21.",
],
DEVICE_CLASS_MOTION: ["16.1.", "16.4.", "16.5.", "16.3.", "16.22."],
}
BINARY_SENSOR_DEVICE_TYPES_ZWAVE = {
DEVICE_CLASS_SAFETY: ["137", "172", "176", "177", "178"],
DEVICE_CLASS_SMOKE: ["138", "156"],
DEVICE_CLASS_PROBLEM: ["148", "149", "157", "158", "164", "174", "175"],
DEVICE_CLASS_GAS: ["150", "151"],
DEVICE_CLASS_SOUND: ["153"],
DEVICE_CLASS_COLD: ["152", "168"],
DEVICE_CLASS_HEAT: ["154", "166", "167"],
DEVICE_CLASS_MOISTURE: ["159", "169"],
DEVICE_CLASS_DOOR: ["160"],
DEVICE_CLASS_BATTERY: ["162"],
DEVICE_CLASS_MOTION: ["155"],
DEVICE_CLASS_VIBRATION: ["173"],
}
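# A minimal lookup sketch (assumes a deadbolt node reporting UOM "11" with a
# raw value of 100): UOM_TO_STATES["11"].get(100) evaluates to STATE_LOCKED,
# and UOM_FRIENDLY_NAME.get("51") gives the percentage unit used for brightness.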
| [
[
[
48,
55
],
[
2122,
2129
]
],
[
[
114,
134
],
[
18189,
18209
]
],
[
[
140,
157
],
[
18029,
18046
]
],
[
[
163,
180
],
[
18157,
18174
]
],
[
[
186,
202
],
[
17958,
17974
]
],
[
[
208,
225
],
[
18068,
18085
]
],
[
[
231,
252
],
[
17451,
17472
],
[
18114,
18135
]
],
[
[
258,
277
],
[
17666,
17685
],
[
18224,
18243
]
],
[
[
283,
303
],
[
17509,
17529
]
],
[
[
309,
329
],
[
17881,
17901
]
],
[
[
335,
354
],
[
17779,
17798
]
],
[
[
360,
378
],
[
17841,
17859
]
],
[
[
384,
402
],
[
17996,
18014
]
],
[
[
408,
430
],
[
18258,
18280
]
],
[
[
436,
459
],
[
2678,
2691
],
[
2774,
2787
],
[
4663,
4676
]
],
[
[
520,
537
],
[
12563,
12580
],
[
12668,
12685
],
[
12884,
12901
]
],
[
[
543,
559
],
[
12593,
12609
]
],
[
[
565,
582
],
[
12533,
12550
],
[
12622,
12639
],
[
12824,
12841
],
[
12854,
12871
],
[
12915,
12932
],
[
12946,
12963
]
],
[
[
588,
605
],
[
12503,
12520
],
[
12794,
12811
]
],
[
[
611,
628
],
[
2734,
2741
],
[
8196,
8203
]
],
[
[
634,
642
],
[
13533,
13541
],
[
16564,
16572
],
[
17393,
17401
]
],
[
[
648,
656
],
[
13573,
13581
],
[
13607,
13615
]
],
[
[
662,
672
],
[
13628,
13638
],
[
13666,
13676
]
],
[
[
678,
684
],
[
13554,
13560
],
[
16553,
16559
],
[
17379,
17385
]
],
[
[
690,
704
],
[
13111,
13125
],
[
13408,
13422
],
[
13436,
13450
],
[
13464,
13478
],
[
16357,
16371
],
[
16400,
16414
],
[
16467,
16481
],
[
17110,
17124
],
[
17327,
17341
]
],
[
[
710,
724
],
[
13084,
13098
],
[
16267,
16281
],
[
17065,
17079
],
[
17230,
17244
]
],
[
[
730,
743
],
[
13237,
13250
]
],
[
[
749,
767
],
[
13184,
13202
],
[
16326,
16344
],
[
17130,
17148
],
[
17291,
17309
]
],
[
[
773,
787
],
[
13057,
13071
],
[
16240,
16254
],
[
17045,
17059
],
[
17202,
17216
]
],
[
[
793,
812
],
[
16294,
16313
],
[
17085,
17104
],
[
17258,
17277
]
],
[
[
818,
831
],
[
13031,
13044
],
[
16214,
16227
],
[
17026,
17039
],
[
17176,
17189
]
],
[
[
837,
848
],
[
13383,
13394
]
],
[
[
854,
866
],
[
13138,
13150
]
],
[
[
913,
928
],
[
2712,
2717
],
[
2800,
2805
],
[
6570,
6575
]
],
[
[
970,
983
],
[
2707,
2710
],
[
2795,
2798
],
[
6358,
6361
]
],
[
[
1027,
1042
],
[
2719,
2724
],
[
6789,
6794
]
],
[
[
1085,
1099
],
[
2701,
2705
],
[
2789,
2793
],
[
6127,
6131
]
],
[
[
1144,
1160
],
[
2693,
2699
],
[
5268,
5274
]
],
[
[
1205,
1221
],
[
2726,
2732
],
[
2807,
2813
],
[
3021,
3027
],
[
7324,
7330
]
],
[
[
1260,
1291
],
[
9991,
10022
]
],
[
[
1297,
1303
],
[
8996,
9002
],
[
10378,
10384
],
[
10614,
10620
],
[
10635,
10641
]
],
[
[
1309,
1330
],
[
9462,
9483
]
],
[
[
1336,
1351
],
[
10587,
10602
]
],
[
[
1357,
1375
],
[
8828,
8846
]
],
[
[
1381,
1392
],
[
9067,
9078
]
],
[
[
1398,
1411
],
[
9196,
9209
],
[
10790,
10803
],
[
11295,
11308
]
],
[
[
1417,
1434
],
[
10434,
10451
]
],
[
[
1440,
1453
],
[
9577,
9590
],
[
9605,
9618
],
[
10329,
10342
],
[
10497,
10510
],
[
10515,
10528
]
],
[
[
1459,
1471
],
[
11190,
11202
]
],
[
[
1477,
1491
],
[
9350,
9364
]
],
[
[
1497,
1508
],
[
9952,
9963
]
],
[
[
1514,
1524
],
[
10291,
10301
],
[
10316,
10326
],
[
11255,
11265
]
],
[
[
1530,
1543
],
[
9168,
9181
]
],
[
[
1549,
1561
],
[
14183,
14195
]
],
[
[
1567,
1581
],
[
14200,
14214
]
],
[
[
1587,
1612
],
[
9425,
9450
]
],
[
[
1618,
1641
],
[
9646,
9669
],
[
9875,
9898
]
],
[
[
1647,
1667
],
[
9843,
9863
]
],
[
[
1673,
1685
],
[
13834,
13846
],
[
15844,
15856
]
],
[
[
1691,
1704
],
[
15964,
15977
]
],
[
[
1710,
1722
],
[
11416,
11428
]
],
[
[
1728,
1737
],
[
13760,
13769
],
[
13920,
13929
]
],
[
[
1743,
1751
],
[
13776,
13784
],
[
13942,
13950
]
],
[
[
1757,
1767
],
[
13817,
13827
],
[
15875,
15885
]
],
[
[
1773,
1786
],
[
15996,
16009
]
],
[
[
1792,
1805
],
[
11471,
11484
]
],
[
[
1811,
1824
],
[
11443,
11456
],
[
15904,
15917
]
],
[
[
1830,
1844
],
[
11387,
11401
]
],
[
[
1850,
1862
],
[
8805,
8817
]
],
[
[
1868,
1883
],
[
9040,
9055
]
],
[
[
1889,
1900
],
[
9306,
9317
]
],
[
[
1906,
1915
],
[
8920,
8929
],
[
8941,
8950
],
[
10822,
10831
],
[
11311,
11320
]
],
[
[
1921,
1931
],
[
8782,
8792
],
[
9090,
9100
],
[
9112,
9122
],
[
9212,
9222
],
[
9623,
9633
],
[
9796,
9806
],
[
11267,
11277
]
],
[
[
1937,
1954
],
[
9697,
9714
]
],
[
[
1960,
1972
],
[
8881,
8893
],
[
9742,
9754
],
[
9766,
9778
]
],
[
[
1978,
1989
],
[
9820,
9831
]
],
[
[
1995,
2007
],
[
10059,
10071
],
[
10083,
10095
]
],
[
[
2013,
2023
],
[
10396,
10406
]
],
[
[
2029,
2044
],
[
9925,
9940
]
],
[
[
2050,
2058
],
[
10255,
10263
]
],
[
[
2064,
2068
],
[
10275,
10279
]
],
[
[
2074,
2088
],
[
10229,
10243
]
],
[
[
2094,
2107
],
[
9514,
9527
]
],
[
[
2112,
2119
]
],
[
[
2154,
2160
]
],
[
[
2173,
2185
]
],
[
[
2214,
2232
]
],
[
[
2251,
2269
]
],
[
[
2288,
2310
]
],
[
[
2338,
2350
]
],
[
[
2359,
2383
]
],
[
[
2409,
2430
]
],
[
[
2447,
2468
]
],
[
[
2480,
2507
]
],
[
[
2516,
2535
]
],
[
[
2542,
2564
]
],
[
[
2573,
2598
]
],
[
[
2608,
2619
]
],
[
[
2632,
2642
]
],
[
[
2655,
2674
]
],
[
[
2743,
2770
]
],
[
[
2816,
2842
]
],
[
[
3000,
3018
]
],
[
[
3029,
3039
]
],
[
[
3048,
3060
]
],
[
[
3078,
3093
]
],
[
[
3114,
3130
]
],
[
[
3153,
3163
],
[
4688,
4698
],
[
5435,
5445
],
[
6143,
6153
],
[
6373,
6383
],
[
6587,
6597
],
[
6806,
6816
],
[
7342,
7352
],
[
8215,
8225
]
],
[
[
3172,
3185
],
[
4712,
4725
],
[
5726,
5739
],
[
6171,
6184
],
[
6397,
6410
],
[
6615,
6628
],
[
6834,
6847
],
[
7375,
7388
],
[
8242,
8255
]
],
[
[
3197,
3215
],
[
4739,
4757
],
[
5753,
5771
],
[
6218,
6236
],
[
6451,
6469
],
[
6691,
6709
],
[
6877,
6895
],
[
7413,
7431
],
[
8316,
8334
]
],
[
[
3232,
3251
],
[
5017,
5036
],
[
5972,
5991
],
[
6260,
6279
],
[
6497,
6516
],
[
6723,
6742
],
[
7224,
7243
],
[
7948,
7967
],
[
8372,
8391
]
],
[
[
3269,
3285
],
[
5180,
5196
],
[
6046,
6062
],
[
6320,
6336
],
[
6537,
6553
],
[
6756,
6772
],
[
7279,
7295
],
[
8123,
8139
],
[
8433,
8449
]
],
[
[
3345,
3365
]
],
[
[
3370,
3390
]
],
[
[
3395,
3412
]
],
[
[
3417,
3440
]
],
[
[
3459,
3480
]
],
[
[
3485,
3502
]
],
[
[
3507,
3527
]
],
[
[
3532,
3551
]
],
[
[
3556,
3579
]
],
[
[
3636,
3652
]
],
[
[
3657,
3671
]
],
[
[
3775,
3800
]
],
[
[
3808,
3830
],
[
7246,
7268
]
],
[
[
3838,
3860
],
[
7983,
8005
]
],
[
[
3868,
3892
]
],
[
[
3900,
3921
],
[
8401,
8422
]
],
[
[
3929,
3951
]
],
[
[
3959,
3989
]
],
[
[
3997,
4022
]
],
[
[
4030,
4049
]
],
[
[
4058,
4076
],
[
6282,
6300
]
],
[
[
4085,
4105
],
[
5093,
5113
]
],
[
[
4114,
4131
],
[
8085,
8102
]
],
[
[
4142,
4154
]
],
[
[
4168,
4187
]
],
[
[
4211,
4231
]
],
[
[
4279,
4294
]
],
[
[
4305,
4320
]
],
[
[
4642,
4654
]
],
[
[
8470,
8487
]
],
[
[
8500,
8514
]
],
[
[
8524,
8539
]
],
[
[
8544,
8562
]
],
[
[
8568,
8583
],
[
10695,
10710
],
[
10712,
10727
]
],
[
[
8592,
8608
],
[
12442,
12458
]
],
[
[
8616,
8637
],
[
12976,
12997
]
],
[
[
8645,
8666
],
[
16151,
16172
]
],
[
[
8674,
8687
],
[
16534,
16547
]
],
[
[
8695,
8704
],
[
9230,
9239
]
],
[
[
8712,
8722
]
],
[
[
8730,
8747
]
],
[
[
11327,
11340
]
],
[
[
17003,
17017
]
],
[
[
17153,
17167
]
],
[
[
17362,
17375
]
],
[
[
17412,
17442
]
],
[
[
17738,
17770
]
]
] |
from scfmsp.controlflowanalysis.StatusRegister import StatusRegister
from scfmsp.controlflowanalysis.instructions.AbstractInstructionBranching import AbstractInstructionBranching
class InstructionJz(AbstractInstructionBranching):
name = 'jz'
def get_execution_time(self):
return 2
def get_branching_condition_domain(self, ac):
return ac.sra.get(StatusRegister.ZERO)
| [
[
[
54,
68
],
[
377,
391
]
],
[
[
150,
178
],
[
201,
229
]
],
[
[
187,
200
]
]
] |